hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
762639dc7c647a1a606e5751d0a01a69eca6fd0e
| 1,084
|
py
|
Python
|
src/validx/exc/__init__.py
|
Cottonwood-Technology/ValidX
|
8ade8377e2bf6c5a7835d33a1e74552744ccdcdf
|
[
"BSD-2-Clause"
] | 19
|
2019-11-08T20:22:15.000Z
|
2022-03-21T10:42:45.000Z
|
src/validx/exc/__init__.py
|
Cottonwood-Technology/ValidX
|
8ade8377e2bf6c5a7835d33a1e74552744ccdcdf
|
[
"BSD-2-Clause"
] | 7
|
2020-04-30T09:51:34.000Z
|
2021-10-05T13:11:28.000Z
|
src/validx/exc/__init__.py
|
Cottonwood-Technology/ValidX
|
8ade8377e2bf6c5a7835d33a1e74552744ccdcdf
|
[
"BSD-2-Clause"
] | 3
|
2019-09-25T03:44:21.000Z
|
2020-08-20T14:21:50.000Z
|
from .errors import (
ValidationError,
ConditionError,
InvalidTypeError,
OptionsError,
MinValueError,
MaxValueError,
FloatValueError,
StrDecodeError,
MinLengthError,
MaxLengthError,
TupleLengthError,
PatternMatchError,
DatetimeParseError,
DatetimeTypeError,
RecursionMaxDepthError,
MappingKeyError,
ForbiddenKeyError,
MissingKeyError,
SchemaError,
)
from .markers import Extra, EXTRA_KEY, EXTRA_VALUE, Step
from .formatter import Formatter, format_error
__all__ = [
"ValidationError",
"ConditionError",
"InvalidTypeError",
"OptionsError",
"MinValueError",
"MaxValueError",
"FloatValueError",
"StrDecodeError",
"MinLengthError",
"MaxLengthError",
"TupleLengthError",
"PatternMatchError",
"DatetimeParseError",
"DatetimeTypeError",
"RecursionMaxDepthError",
"MappingKeyError",
"ForbiddenKeyError",
"MissingKeyError",
"SchemaError",
"Extra",
"EXTRA_KEY",
"EXTRA_VALUE",
"Step",
"Formatter",
"format_error",
]
| 20.45283
| 56
| 0.678044
| 66
| 1,084
| 10.984848
| 0.469697
| 0.08
| 0.124138
| 0.157241
| 0.868966
| 0.868966
| 0.794483
| 0.794483
| 0.794483
| 0.794483
| 0
| 0
| 0.22417
| 1,084
| 52
| 57
| 20.846154
| 0.862069
| 0
| 0
| 0
| 0
| 0
| 0.311808
| 0.020295
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.06
| 0
| 0.06
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
52186464303db224a243afb96f6a615e25b7741c
| 7,546
|
py
|
Python
|
RPGGame/mapdata.py
|
seancheng33/PygameProject
|
d59e5b2bcade8d58214f62baa753f30d57897b6f
|
[
"BSD-2-Clause"
] | null | null | null |
RPGGame/mapdata.py
|
seancheng33/PygameProject
|
d59e5b2bcade8d58214f62baa753f30d57897b6f
|
[
"BSD-2-Clause"
] | null | null | null |
RPGGame/mapdata.py
|
seancheng33/PygameProject
|
d59e5b2bcade8d58214f62baa753f30d57897b6f
|
[
"BSD-2-Clause"
] | null | null | null |
'''
@Author : sean cheng
@Email : aya234@163.com
@Create_Time : 2018/6/15
@Program : 地图配置文件
'''
# 角色的初始位置
role_postion = {'base': (10, 12),
'sea': (0, 0)
}
# 地图的入口和出口 可以有多个,比如一个城市的出入口可以是出入到大地图、各种商店、隐藏剧情的房子等等,一个字典中包含多个字典来控制
inter_postion = {'base': {'tosea': (10, 10), 'toitemshop': (5, 5), 'tobank': (15, 20)},
'sea': {'tobase': (5, 5)}
}
# 基本主地图
mapBase = [
['x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2',
'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2'],
['x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2',
'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2'],
['x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2',
'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2'],
['x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2',
'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2'],
['x0y2', 'x0y2', 'x0y2', 'x0y2', 'x14y16', 'x15y15', 'x15y15', 'x15y16', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2',
'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2'],
['x0y2', 'x0y2', 'x0y2', 'x0y2', 'x15y15', 'x15y15', 'x15y15', 'x15y15', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2',
'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2'],
['x0y2', 'x0y2', 'x0y2', 'x0y2', 'x15y15', 'x15y15', 'x15y15', 'x15y15', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2',
'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2'],
['x0y2', 'x0y2', 'x0y2', 'x0y2', 'x15y15', 'x15y15', 'x15y15', 'x15y15', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2',
'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2'],
['x0y2', 'x0y2', 'x0y2', 'x0y2', 'x14y17', 'x15y15', 'x15y15', 'x15y17', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2',
'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x1y0', 'x0y2', 'x0y2', 'x0y2', 'x0y2'],
['x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2',
'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x1y0', 'x0y2', 'x0y2', 'x0y2', 'x0y2'],
['x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x7y7', 'x0y2', 'x0y2', 'x0y2',
'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x1y0', 'x0y2', 'x0y2', 'x0y2', 'x0y2'],
['x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2',
'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x1y0', 'x0y2', 'x0y2', 'x0y2', 'x0y2'],
['x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2',
'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x1y0', 'x0y2', 'x0y2', 'x0y2', 'x0y2'],
['x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2',
'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x1y0', 'x0y2', 'x0y2', 'x0y2', 'x0y2'],
['x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2',
'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2'],
['x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2',
'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2'],
['x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x2y7', 'x2y7', 'x2y7', 'x2y7', 'x2y7', 'x0y2', 'x0y2', 'x0y2',
'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2'],
['x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x2y7', 'x0y2', 'x0y2', 'x2y7', 'x0y2', 'x0y2', 'x0y2', 'x0y2',
'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2'],
['x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x2y7', 'x0y2', 'x2y7', 'x0y2', 'x0y2', 'x0y2', 'x0y2',
'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2'],
['x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x2y7', 'x0y2', 'x2y7', 'x0y2', 'x0y2', 'x0y2', 'x0y2',
'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2'],
['x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x2y7', 'x0y2', 'x0y2', 'x0y2', 'x0y2',
'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2'],
['x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2',
'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2'],
['x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2',
'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2'],
['x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2',
'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2'],
['x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2',
'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2'],
['x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2',
'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2'],
['x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2',
'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2', 'x0y2']
]
# 一个全是水的小地图
mapSea = [['x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1'],
['x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1'],
['x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1'],
['x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1'],
['x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1'],
['x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x1y2', 'x0y1', 'x0y1', 'x0y1', 'x0y1'],
['x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1'],
['x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1'],
['x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1'],
['x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1', 'x0y1']
]
| 85.75
| 117
| 0.476544
| 878
| 7,546
| 4.092255
| 0.056948
| 1.489563
| 2.170888
| 2.814361
| 0.915113
| 0.915113
| 0.915113
| 0.915113
| 0.915113
| 0.915113
| 0
| 0.27695
| 0.172674
| 7,546
| 87
| 118
| 86.735632
| 0.298574
| 0.023986
| 0
| 0.726027
| 0
| 0
| 0.461925
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
523cbb457d070a3cf31e7d36371288268aaca809
| 4,681
|
py
|
Python
|
ephyviewer/icons/icons_py3.py
|
rdarie/ephyviewer
|
91c27708ccad3e8a6799da41ac6f528e7e3486a0
|
[
"MIT"
] | null | null | null |
ephyviewer/icons/icons_py3.py
|
rdarie/ephyviewer
|
91c27708ccad3e8a6799da41ac6f528e7e3486a0
|
[
"MIT"
] | null | null | null |
ephyviewer/icons/icons_py3.py
|
rdarie/ephyviewer
|
91c27708ccad3e8a6799da41ac6f528e7e3486a0
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Resource object code
#
# Created by: The Resource Compiler for PyQt5 (Qt v5.10.1)
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore
qt_resource_data = b"\
\x00\x00\x01\x63\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\
\x30\x20\x30\x20\x31\x36\x20\x31\x36\x22\x3e\x0a\x20\x20\x3c\x64\
\x65\x66\x73\x20\x69\x64\x3d\x22\x64\x65\x66\x73\x33\x30\x35\x31\
\x22\x3e\x0a\x20\x20\x20\x20\x3c\x73\x74\x79\x6c\x65\x20\x74\x79\
\x70\x65\x3d\x22\x74\x65\x78\x74\x2f\x63\x73\x73\x22\x20\x69\x64\
\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x2d\x63\x6f\x6c\x6f\x72\x2d\
\x73\x63\x68\x65\x6d\x65\x22\x3e\x0a\x20\x20\x20\x20\x20\x20\x2e\
\x43\x6f\x6c\x6f\x72\x53\x63\x68\x65\x6d\x65\x2d\x54\x65\x78\x74\
\x20\x7b\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x63\x6f\x6c\x6f\x72\
\x3a\x23\x32\x33\x32\x36\x32\x39\x3b\x0a\x20\x20\x20\x20\x20\x20\
\x7d\x0a\x20\x20\x20\x20\x20\x20\x3c\x2f\x73\x74\x79\x6c\x65\x3e\
\x0a\x20\x20\x3c\x2f\x64\x65\x66\x73\x3e\x0a\x20\x3c\x70\x61\x74\
\x68\x20\x73\x74\x79\x6c\x65\x3d\x22\x66\x69\x6c\x6c\x3a\x63\x75\
\x72\x72\x65\x6e\x74\x43\x6f\x6c\x6f\x72\x3b\x66\x69\x6c\x6c\x2d\
\x6f\x70\x61\x63\x69\x74\x79\x3a\x31\x3b\x73\x74\x72\x6f\x6b\x65\
\x3a\x6e\x6f\x6e\x65\x22\x0a\x20\x20\x20\x20\x20\x20\x20\x64\x3d\
\x22\x6d\x20\x33\x20\x33\x20\x30\x20\x31\x30\x20\x31\x30\x20\x2d\
\x35\x20\x7a\x22\x0a\x20\x20\x20\x20\x20\x63\x6c\x61\x73\x73\x3d\
\x22\x43\x6f\x6c\x6f\x72\x53\x63\x68\x65\x6d\x65\x2d\x54\x65\x78\
\x74\x22\x0a\x20\x20\x20\x20\x20\x2f\x3e\x0a\x3c\x2f\x73\x76\x67\
\x3e\x0a\
\x00\x00\x01\x68\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\
\x30\x20\x30\x20\x31\x36\x20\x31\x36\x22\x3e\x0a\x20\x20\x3c\x64\
\x65\x66\x73\x20\x69\x64\x3d\x22\x64\x65\x66\x73\x33\x30\x35\x31\
\x22\x3e\x0a\x20\x20\x20\x20\x3c\x73\x74\x79\x6c\x65\x20\x74\x79\
\x70\x65\x3d\x22\x74\x65\x78\x74\x2f\x63\x73\x73\x22\x20\x69\x64\
\x3d\x22\x63\x75\x72\x72\x65\x6e\x74\x2d\x63\x6f\x6c\x6f\x72\x2d\
\x73\x63\x68\x65\x6d\x65\x22\x3e\x0a\x20\x20\x20\x20\x20\x20\x2e\
\x43\x6f\x6c\x6f\x72\x53\x63\x68\x65\x6d\x65\x2d\x54\x65\x78\x74\
\x20\x7b\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x63\x6f\x6c\x6f\x72\
\x3a\x23\x32\x33\x32\x36\x32\x39\x3b\x0a\x20\x20\x20\x20\x20\x20\
\x7d\x0a\x20\x20\x20\x20\x20\x20\x3c\x2f\x73\x74\x79\x6c\x65\x3e\
\x0a\x20\x20\x3c\x2f\x64\x65\x66\x73\x3e\x0a\x20\x3c\x70\x61\x74\
\x68\x20\x73\x74\x79\x6c\x65\x3d\x22\x66\x69\x6c\x6c\x3a\x63\x75\
\x72\x72\x65\x6e\x74\x43\x6f\x6c\x6f\x72\x3b\x66\x69\x6c\x6c\x2d\
\x6f\x70\x61\x63\x69\x74\x79\x3a\x31\x3b\x73\x74\x72\x6f\x6b\x65\
\x3a\x6e\x6f\x6e\x65\x22\x0a\x20\x20\x20\x20\x20\x20\x20\x64\x3d\
\x22\x6d\x20\x33\x20\x33\x20\x30\x20\x31\x30\x20\x31\x30\x20\x30\
\x20\x30\x20\x2d\x31\x30\x20\x7a\x22\x0a\x20\x20\x20\x20\x20\x63\
\x6c\x61\x73\x73\x3d\x22\x43\x6f\x6c\x6f\x72\x53\x63\x68\x65\x6d\
\x65\x2d\x54\x65\x78\x74\x22\x0a\x20\x20\x20\x20\x20\x2f\x3e\x0a\
\x3c\x2f\x73\x76\x67\x3e\x0a\
"
qt_resource_name = b"\
\x00\x18\
\x0f\xa4\x8b\xc7\
\x00\x6d\
\x00\x65\x00\x64\x00\x69\x00\x61\x00\x2d\x00\x70\x00\x6c\x00\x61\x00\x79\x00\x62\x00\x61\x00\x63\x00\x6b\x00\x2d\x00\x73\x00\x74\
\x00\x61\x00\x72\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x17\
\x09\x10\x67\xc7\
\x00\x6d\
\x00\x65\x00\x64\x00\x69\x00\x61\x00\x2d\x00\x70\x00\x6c\x00\x61\x00\x79\x00\x62\x00\x61\x00\x63\x00\x6b\x00\x2d\x00\x73\x00\x74\
\x00\x6f\x00\x70\x00\x2e\x00\x73\x00\x76\x00\x67\
"
qt_resource_struct_v1 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x01\
\x00\x00\x00\x36\x00\x00\x00\x00\x00\x01\x00\x00\x01\x67\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
"
qt_resource_struct_v2 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x36\x00\x00\x00\x00\x00\x01\x00\x00\x01\x67\
\x00\x00\x01\x6a\x85\x02\x61\xd8\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x01\x6a\x85\x02\x61\xd8\
"
qt_version = QtCore.qVersion().split('.')
if qt_version < ['5', '8', '0']:
rcc_version = 1
qt_resource_struct = qt_resource_struct_v1
else:
rcc_version = 2
qt_resource_struct = qt_resource_struct_v2
def qInitResources():
QtCore.qRegisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
QtCore.qUnregisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
| 43.747664
| 129
| 0.726554
| 1,049
| 4,681
| 3.20591
| 0.106768
| 0.146298
| 0.165923
| 0.164139
| 0.856378
| 0.856378
| 0.837348
| 0.837348
| 0.81237
| 0.81237
| 0
| 0.369628
| 0.04059
| 4,681
| 106
| 130
| 44.160377
| 0.379203
| 0.032472
| 0
| 0.555556
| 0
| 0.6
| 0.000885
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0.022222
| false
| 0
| 0.011111
| 0
| 0.033333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
5255334a0b0e89d2fad7df908b5b83ccd5f0b0b2
| 14,449
|
py
|
Python
|
tests/test_slack.py
|
tashachin/messages
|
e8370ece45b6eb11e06539121dd293479edb9fe8
|
[
"MIT"
] | 1
|
2021-05-16T06:38:02.000Z
|
2021-05-16T06:38:02.000Z
|
tests/test_slack.py
|
tashachin/messages
|
e8370ece45b6eb11e06539121dd293479edb9fe8
|
[
"MIT"
] | null | null | null |
tests/test_slack.py
|
tashachin/messages
|
e8370ece45b6eb11e06539121dd293479edb9fe8
|
[
"MIT"
] | null | null | null |
"""messages.slack tests."""
import pytest
import requests
import messages.slack
from messages.slack import SlackWebhook
from messages.slack import SlackPost
from messages._eventloop import MESSAGELOOP
##############################################################################
# FIXTURES
##############################################################################
@pytest.fixture()
def get_slackWH(cfg_mock, mocker):
"""Return a valid SlackWebhook object."""
mocker.patch.object(messages.slack, 'check_config_file')
return SlackWebhook(auth='https://testurl.com', body='message',
attachments=['https://url1.com', 'https://url2.com'],
profile='myProfile')
@pytest.fixture()
def get_slackP(cfg_mock, mocker):
"""Return a valid SlackPost object."""
mocker.patch.object(messages.slack, 'check_config_file')
return SlackPost(auth='1234:ABCD', channel='general',
body='message', attachments=['https://url1.com', 'https://url2.com'],
profile='myProfile')
##############################################################################
# TESTS: Slack*.__init__
##############################################################################
def test_slackWH_init(get_slackWH):
"""
GIVEN a need for a SlackWebhook object
WHEN instantiated
THEN assert it is properly created
"""
s = get_slackWH
assert s.body == 'message'
assert s.auth == '***obfuscated***'
assert '_auth' in s.__dict__
assert s._auth == 'https://testurl.com'
assert isinstance(s.message, dict)
def test_slackP_init(get_slackP):
"""
GIVEN a need for a SlackPost object
WHEN instantiated
THEN assert it is properly created
"""
s = get_slackP
assert s.body == 'message'
assert s.auth == '***obfuscated***'
assert '_auth' in s.__dict__
assert s._auth == '1234:ABCD'
assert isinstance(s.message, dict)
##############################################################################
# TESTS: Slack*.__str__
##############################################################################
def test_slackWH_str(get_slackWH, capsys):
"""
GIVEN a SlackWebhook object
WHEN print() is called on the object
THEN assert the proper output prints
"""
s = get_slackWH
print(s)
out, err = capsys.readouterr()
assert "['https://url1.com', 'https://url2.com']" in out
assert "'message'" in out
assert err == ''
def test_slackP_str(get_slackP, capsys):
"""
GIVEN a SlackPost object
WHEN print() is called on the object
THEN assert the proper output prints
"""
s = get_slackP
print(s)
out, err = capsys.readouterr()
assert "['https://url1.com', 'https://url2.com']" in out
assert "'message'" in out
assert 'gener' in out
assert err == ''
##############################################################################
# TESTS: Slack*._construct_message
##############################################################################
def test_slackWH_construct_message(get_slackWH, mocker):
"""
GIVEN a valid SlackWebhook object
WHEN construct_message() is called
THEN assert the message is properly created
"""
add_mock = mocker.patch.object(SlackWebhook, '_add_attachments')
s = get_slackWH
s._construct_message()
assert s.message['text'] == 'message'
assert add_mock.call_count == 1
def test_slackP_construct_message(get_slackP, mocker):
"""
GIVEN a valid SlackPost object
WHEN construct_message() is called
THEN assert the message is properly created
"""
add_mock = mocker.patch.object(SlackPost, '_add_attachments')
s = get_slackP
s._construct_message()
assert s.message['text'] == 'message'
assert add_mock.call_count == 1
def test_slackWH_construct_message_withFromSubj(get_slackWH, mocker):
"""
GIVEN a valid SlackWebhook object
WHEN construct_message() is called
THEN assert the message is properly created
"""
add_mock = mocker.patch.object(SlackWebhook, '_add_attachments')
s = get_slackWH
s.from_ = 'me'
s.subject = 'Tst Msg'
s._construct_message()
expected = 'From: me\nSubject: Tst Msg\nmessage'
assert s.message['text'] == expected
assert add_mock.call_count == 1
def test_slackP_construct_message_withFromSubj(get_slackP, mocker):
"""
GIVEN a valid SlackPost object
WHEN construct_message() is called
THEN assert the message is properly created
"""
add_mock = mocker.patch.object(SlackPost, '_add_attachments')
s = get_slackP
s.from_ = 'me'
s.subject = 'Tst Msg'
s._construct_message()
expected = 'From: me\nSubject: Tst Msg\nmessage'
assert s.message['text'] == expected
assert add_mock.call_count == 1
##############################################################################
# TESTS: Slack*._add_attachments
##############################################################################
def test_slackWH_add_attachments_list(get_slackWH):
"""
GIVEN a valid SlackWebhook object with self.attach_urls = list
WHEN add_attachments() is called
THEN assert the urls are properly attached to the message
"""
s = get_slackWH
s._add_attachments()
expected = [{'image_url': 'https://url1.com', 'author_name': ''},
{'image_url': 'https://url2.com', 'author_name': ''}]
assert s.message['attachments'] == expected
def test_slackP_add_attachments_list(get_slackP):
"""
GIVEN a valid SlackPost object with self.attach_urls = list
WHEN add_attachments() is called
THEN assert the urls are properly attached to the message
"""
s = get_slackP
s._add_attachments()
expected = [{'image_url': 'https://url1.com', 'author_name': ''},
{'image_url': 'https://url2.com', 'author_name': ''}]
assert s.message['attachments'] == expected
def test_slackWH_add_attachments_str(get_slackWH):
"""
GIVEN a valid SlackWebhook object with self.attach_urls = str
WHEN add_attachments() is called
THEN assert the urls are properly attached to the message
"""
s = get_slackWH
s.attachments = 'https://url1.com'
s._add_attachments()
expected = [{'image_url': 'https://url1.com', 'author_name': ''}]
assert s.message['attachments'] == expected
def test_slackP_add_attachments_str(get_slackP):
"""
GIVEN a valid SlackPost object with self.attach_urls = str
WHEN add_attachments() is called
THEN assert the urls are properly attached to the message
"""
s = get_slackP
s.attachments = 'https://url1.com'
s._add_attachments()
expected = [{'image_url': 'https://url1.com', 'author_name': ''}]
assert s.message['attachments'] == expected
def test_slackWH_add_attachments_with_params(get_slackWH):
"""
GIVEN a valid SlackWebhook object with extra attachment params
WHEN add_attachments() is called
THEN assert the extra params are properly added
"""
s = get_slackWH
s.attachments = 'https://url1.com'
s.params = {'author_name': 'me', 'text': 'image of me'}
s._add_attachments()
expected = [{'image_url': 'https://url1.com', 'author_name': 'me',
'text': 'image of me'}]
assert s.message['attachments'] == expected
def test_slackP_add_attachments_with_params(get_slackP):
"""
GIVEN a valid SlackPost object with extra attachment params
WHEN add_attachments() is called
THEN assert the extra params are properly added
"""
s = get_slackP
s.attachments = 'https://url1.com'
s.params = {'author_name': 'me', 'text': 'image of me'}
s._add_attachments()
expected = [{'image_url': 'https://url1.com', 'author_name': 'me',
'text': 'image of me'}]
assert s.message['attachments'] == expected
##############################################################################
# TESTS: Slack*.send
##############################################################################
def test_slackWH_send_status201(get_slackWH, capsys, mocker):
"""
GIVEN a valid SlackWebhook object
WHEN send() is called
THEN assert the proper send sequence occurs
"""
con_mock = mocker.patch.object(SlackWebhook, '_construct_message')
req_mock = mocker.patch.object(requests, 'post')
req_mock.return_value.status_code = 201
s = get_slackWH
s.send()
out, err = capsys.readouterr()
assert con_mock.call_count == 1
assert req_mock.call_count == 1
assert out == 'Message sent.\n'
assert err == ''
def test_slackWH_send_status301(get_slackWH, capsys, mocker):
"""
GIVEN a valid SlackWebhook object
WHEN send() is called
THEN assert the proper send sequence occurs
"""
con_mock = mocker.patch.object(SlackWebhook, '_construct_message')
req_mock = mocker.patch.object(requests, 'post')
req_mock.return_value.status_code = 301
s = get_slackWH
s.send()
out, err = capsys.readouterr()
assert con_mock.call_count == 1
assert req_mock.call_count == 1
assert 'Error while sending. HTTP status code = 301' in out
assert err == ''
def test_slackP_send_status_201(get_slackP, capsys, mocker):
"""
GIVEN a valid SlackPost object
WHEN send() is called
THEN assert the proper send sequence occurs
"""
con_mock = mocker.patch.object(SlackPost, '_construct_message')
req_mock = mocker.patch.object(requests, 'post')
req_mock.return_value.status_code = 201
s = get_slackP
s.send()
out, err = capsys.readouterr()
assert con_mock.call_count == 1
assert req_mock.call_count == 1
assert out == 'Message sent.\n'
assert err == ''
def test_slackWH_send_verbose_true(get_slackWH, capsys, mocker):
"""
GIVEN a valid SlackWebhook object
WHEN *.send() is called
THEN assert the correct functions are called, correct attributes
updated and correct debug output is generated (using verbose flag
set to True)
"""
con_mock = mocker.patch.object(SlackWebhook, '_construct_message')
req_mock = mocker.patch.object(requests, 'post')
req_mock.return_value.status_code = 201
s = get_slackWH
s.verbose = True
s.send()
out, err = capsys.readouterr()
assert 'Debugging info' in out
assert 'Message created.' in out
assert ' * URL: https://testurl.com' in out
assert ' * From: Not Specified' in out
assert ' * Subject: None' in out
assert ' * Body: \'message\'' in out
assert ' * Attachments: [\'https://url1.com\', \'https://url2.com\']' in out
assert ' * HTTP status code: 201' in out
assert 'Message sent.' in out
assert err == ''
def test_slackWH_send_verbose_false(get_slackWH, capsys, mocker):
"""
GIVEN a valid SlackWebhook object
WHEN *.send() is called
THEN assert the correct functions are called, correct attributes
updated and correct debug output is generated (using verbose flag
set to False)
"""
con_mock = mocker.patch.object(SlackWebhook, '_construct_message')
req_mock = mocker.patch.object(requests, 'post')
req_mock.return_value.status_code = 201
s = get_slackWH
s.verbose = False
s.send()
out, err = capsys.readouterr()
assert 'Debugging info' not in out
assert 'Message created.' not in out
assert ' * URL: https://test_url.com' not in out
assert ' * From: Not Specified' not in out
assert ' * Subject: None' not in out
assert ' * Body: \'message\'' not in out
assert ' * Attachments: [\'https://url1.com\', \'https://url2.com\']' not in out
assert 'Message sent.' in out
assert err == ''
def test_slackP_send_verbose_true(get_slackP, capsys, mocker):
    """
    GIVEN a valid SlackPost object
    WHEN *.send() is called
    THEN assert the correct functions are called, correct attributes
        updated and correct debug output is generated (using verbose flag
        set to True)
    """
    # Patch out message construction and the HTTP POST so no network I/O occurs.
    mocker.patch.object(SlackPost, '_construct_message')
    post_mock = mocker.patch.object(requests, 'post')
    post_mock.return_value.status_code = 201
    slack_obj = get_slackP
    slack_obj.verbose = True
    slack_obj.send()
    out, err = capsys.readouterr()
    # With verbose on, every debugging line must appear on stdout.
    expected_fragments = (
        'Debugging info',
        'Message created.',
        ' * Channel: general',
        ' * From: Not Specified',
        ' * Subject: None',
        " * Body: 'message'",
        " * Attachments: ['https://url1.com', 'https://url2.com']",
        'Message sent.',
    )
    for fragment in expected_fragments:
        assert fragment in out
    assert err == ''
def test_slackP_send_verbose_false(get_slackP, capsys, mocker):
    """
    GIVEN a valid SlackPost object
    WHEN *.send() is called
    THEN assert the correct functions are called, correct attributes
        updated and correct debug output is generated (using verbose flag
        set to False)
    """
    # Patch out message construction and the HTTP POST so no network I/O occurs.
    mocker.patch.object(SlackPost, '_construct_message')
    post_mock = mocker.patch.object(requests, 'post')
    post_mock.return_value.status_code = 201
    slack_obj = get_slackP
    slack_obj.verbose = False
    slack_obj.send()
    out, err = capsys.readouterr()
    # None of the debugging lines may leak through when verbose is off.
    suppressed_fragments = (
        'Debugging info',
        'Message created.',
        ' * Channel: general',
        ' * From: Not Specified',
        ' * Subject: None',
        " * Body: 'message'",
        " * Attachments: ['https://url1.com', 'https://url2.com']",
    )
    for fragment in suppressed_fragments:
        assert fragment not in out
    assert 'Message sent.' in out
    assert err == ''
##############################################################################
# TESTS: Slack*.send_async
##############################################################################
def test_slackWH_send_async(get_slackWH, mocker):
    """
    GIVEN a valid SlackWebhook object
    WHEN send_async() is called
    THEN assert the proper send sequence occurs
    """
    # Stub the global message loop so nothing is actually queued/sent.
    add_message_mock = mocker.patch.object(MESSAGELOOP, 'add_message')
    slack_obj = get_slackWH
    slack_obj.send_async()
    assert add_message_mock.call_count == 1
def test_slackP_send_async(get_slackP, mocker):
    """
    GIVEN a valid SlackPost object
    WHEN send_async() is called
    THEN assert the proper send sequence occurs
    """
    # Stub the global message loop so nothing is actually queued/sent.
    add_message_mock = mocker.patch.object(MESSAGELOOP, 'add_message')
    slack_obj = get_slackP
    slack_obj.send_async()
    assert add_message_mock.call_count == 1
| 33.216092
| 84
| 0.618728
| 1,758
| 14,449
| 4.910125
| 0.081911
| 0.02259
| 0.049699
| 0.048656
| 0.904889
| 0.876506
| 0.866775
| 0.860519
| 0.849745
| 0.831094
| 0
| 0.007277
| 0.201052
| 14,449
| 434
| 85
| 33.292627
| 0.740471
| 0.229497
| 0
| 0.803493
| 0
| 0
| 0.196765
| 0
| 0
| 0
| 0
| 0
| 0.358079
| 1
| 0.10917
| false
| 0
| 0.026201
| 0
| 0.144105
| 0.008734
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
52590544674823dc062556b3bb4ceae6a7a2eef2
| 1,079
|
py
|
Python
|
print statements3.py
|
sarah21-meet/meet2019y1lab3
|
0f212b96736c5ab2578b87a0f8c0777f40155819
|
[
"MIT"
] | null | null | null |
print statements3.py
|
sarah21-meet/meet2019y1lab3
|
0f212b96736c5ab2578b87a0f8c0777f40155819
|
[
"MIT"
] | null | null | null |
print statements3.py
|
sarah21-meet/meet2019y1lab3
|
0f212b96736c5ab2578b87a0f8c0777f40155819
|
[
"MIT"
] | null | null | null |
Python 3.6.8 (default, Jan 14 2019, 11:02:34)
[GCC 8.0.1 20180414 (experimental) [trunk revision 259383]] on linux
Type "help", "copyright", "credits" or "license()" for more information.
>>>
========= RESTART: /home/student/sarah21_lab3/meet2019y1lab3/Echo.py =========
Say Something Codings fun!!
>>> user.upper()
' CODINGS FUN!!'
>>> user.lower()
' codings fun!!'
>>> user.swapcase()
' cODINGS FUN!!'
>>> user.upper()
' CODINGS FUN!!'
>>> user.lower()
' codings fun!!'
>>> user.swapcase()
' cODINGS FUN!!'
>>>
========= RESTART: /home/student/sarah21_lab3/meet2019y1lab3/Echo.py =========
Say Something hi
>>> user
' hi'
>>>
========= RESTART: /home/student/sarah21_lab3/meet2019y1lab3/Echo.py =========
Say Something
========= RESTART: /home/student/sarah21_lab3/meet2019y1lab3/Echo.py =========
Say Something heee
HEEE
>>>
========= RESTART: /home/student/sarah21_lab3/meet2019y1lab3/Echo.py =========
Say Something fff
FFF
fff
>>>
========= RESTART: /home/student/sarah21_lab3/meet2019y1lab3/Echo.py =========
Say Something good!!!
GOOD!!!
good!!!
GOOD!!!
>>>
| 25.690476
| 78
| 0.627433
| 129
| 1,079
| 5.20155
| 0.387597
| 0.104322
| 0.160954
| 0.223547
| 0.739195
| 0.739195
| 0.739195
| 0.739195
| 0.739195
| 0.739195
| 0
| 0.090241
| 0.116775
| 1,079
| 41
| 79
| 26.317073
| 0.613851
| 0
| 0
| 0.634146
| 0
| 0
| 0.107507
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
525c2a67cdbc4f37259efedf5701f056ea68ee25
| 109
|
py
|
Python
|
greppa/grep.py
|
SimonLarsen/pandas-grep
|
bebfe8433ff5238ca36797afa77620f7c6e2b002
|
[
"MIT"
] | null | null | null |
greppa/grep.py
|
SimonLarsen/pandas-grep
|
bebfe8433ff5238ca36797afa77620f7c6e2b002
|
[
"MIT"
] | null | null | null |
greppa/grep.py
|
SimonLarsen/pandas-grep
|
bebfe8433ff5238ca36797afa77620f7c6e2b002
|
[
"MIT"
] | null | null | null |
import pandas as pd
def grep(df: pd.DataFrame, pattern: str) -> pd.DataFrame:
    """Return the rows of *df* that satisfy the query expression *pattern*.

    *pattern* is evaluated by :meth:`pandas.DataFrame.query`, so it may
    reference column names directly (e.g. ``"a > 1"``).
    """
    matched = df.query(pattern)
    return matched
| 18.166667
| 57
| 0.706422
| 17
| 109
| 4.529412
| 0.705882
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.174312
| 109
| 5
| 58
| 21.8
| 0.855556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
527da5a1c344c8bcd2fa1f6a16171e72f88e2639
| 26,849
|
py
|
Python
|
hzclient/codec/arefcodec.py
|
hazelcast-incubator/hazelcast-python-client
|
5ec6c908916a6adef648059314923c0dbf71557b
|
[
"Apache-2.0"
] | null | null | null |
hzclient/codec/arefcodec.py
|
hazelcast-incubator/hazelcast-python-client
|
5ec6c908916a6adef648059314923c0dbf71557b
|
[
"Apache-2.0"
] | null | null | null |
hzclient/codec/arefcodec.py
|
hazelcast-incubator/hazelcast-python-client
|
5ec6c908916a6adef648059314923c0dbf71557b
|
[
"Apache-2.0"
] | null | null | null |
import ctypes
from hzclient.clientmessage import ClientMessage
from util import util
import eventconstant
class AtomicReferenceAlterAndGetCodec:
    """Codec for the AtomicReference alterAndGet operation.

    Encodes/decodes the request (name + serialized function blob) and the
    response (a nullable serialized data blob, flagged by a leading boolean).
    """
    REQUEST_TYPE = AtomicReferenceMessageType.ATOMICREFERENCE_ALTERANDGET
    RESPONSE_TYPE = 105
    RETRYABLE = False

    # ------------------------------ REQUEST ------------------------------
    class RequestParameters:
        def __init__(self):
            self.TYPE = AtomicReferenceAlterAndGetCodec.REQUEST_TYPE
            self.name = None      # reference name (string)
            self.function = None  # serialized function (bytes)

    @classmethod
    def encodeRequest(cls, name, function):
        """Serialize an alterAndGet request into a ClientMessage."""
        clientMessage = ClientMessage()
        # BUG FIX: REQUEST_TYPE is a plain int constant; the original code
        # called `.id` on it, which raises AttributeError.  Every other
        # codec in this file passes the constant directly.
        clientMessage.setOperationType(AtomicReferenceAlterAndGetCodec.REQUEST_TYPE)
        clientMessage.setRetryable(AtomicReferenceAlterAndGetCodec.RETRYABLE)
        clientMessage.set(name)
        clientMessage.set(function)
        clientMessage.updateSize()
        return clientMessage

    @classmethod
    def decodeRequest(cls, clientMessage):
        """Extract the alterAndGet request parameters from *clientMessage*."""
        parameters = AtomicReferenceAlterAndGetCodec.RequestParameters()
        parameters.name = clientMessage.extractStringFromPayload()
        parameters.function = clientMessage.extractBytesFromPayload()
        return parameters

    # ------------------------------ RESPONSE -----------------------------
    class ResponseParameters:
        def __init__(self):
            self.TYPE = AtomicReferenceAlterAndGetCodec.RESPONSE_TYPE
            self.response = None  # nullable serialized value (bytes or None)

    @classmethod
    def encodeResponse(cls, response):
        """Serialize the nullable alterAndGet response."""
        clientMessage = ClientMessage()
        clientMessage.setOperationType(AtomicReferenceAlterAndGetCodec.RESPONSE_TYPE)
        # A leading boolean flags whether the payload is null.
        if response is None:
            clientMessage.set(True)
        else:
            clientMessage.set(False)
            clientMessage.set(response)
        return clientMessage

    @classmethod
    def decodeResponse(cls, clientMessage):
        """Extract the nullable alterAndGet response from *clientMessage*."""
        parameters = AtomicReferenceAlterAndGetCodec.ResponseParameters()
        if not clientMessage.extractBooleanFromPayload():
            parameters.response = clientMessage.extractBytesFromPayload()
        return parameters
class AtomicReferenceAlterCodec:
    """Codec for the AtomicReference alter operation (no response payload)."""
    REQUEST_TYPE = AtomicReferenceMessageType.ATOMICREFERENCE_ALTER
    RESPONSE_TYPE = 100
    RETRYABLE = False

    # ------------------------------ REQUEST ------------------------------
    class RequestParameters:
        def __init__(self):
            self.TYPE = AtomicReferenceAlterCodec.REQUEST_TYPE
            self.name = None
            self.function = None

    @classmethod
    def encodeRequest(cls, name, function):
        """Serialize an alter request into a ClientMessage."""
        msg = ClientMessage()
        msg.setOperationType(AtomicReferenceAlterCodec.REQUEST_TYPE)
        msg.setRetryable(AtomicReferenceAlterCodec.RETRYABLE)
        msg.set(name)
        msg.set(function)
        msg.updateSize()
        return msg

    @classmethod
    def decodeRequest(cls, clientMessage):
        """Extract the alter request parameters from *clientMessage*."""
        params = AtomicReferenceAlterCodec.RequestParameters()
        params.name = clientMessage.extractStringFromPayload()
        params.function = clientMessage.extractBytesFromPayload()
        return params

    # ------------------------------ RESPONSE -----------------------------
    class ResponseParameters:
        def __init__(self):
            self.TYPE = AtomicReferenceAlterCodec.RESPONSE_TYPE

    @classmethod
    def encodeResponse(cls):
        """Serialize the (empty) alter response."""
        msg = ClientMessage()
        msg.setOperationType(AtomicReferenceAlterCodec.RESPONSE_TYPE)
        return msg

    @classmethod
    def decodeResponse(cls, clientMessage):
        """The alter response carries no payload fields."""
        return AtomicReferenceAlterCodec.ResponseParameters()
class AtomicReferenceApplyCodec:
    """Codec for the AtomicReference apply operation (nullable response)."""
    REQUEST_TYPE = AtomicReferenceMessageType.ATOMICREFERENCE_APPLY
    RESPONSE_TYPE = 105
    RETRYABLE = False

    # ------------------------------ REQUEST ------------------------------
    class RequestParameters:
        def __init__(self):
            self.TYPE = AtomicReferenceApplyCodec.REQUEST_TYPE
            self.name = None
            self.function = None

    @classmethod
    def encodeRequest(cls, name, function):
        """Serialize an apply request into a ClientMessage."""
        msg = ClientMessage()
        msg.setOperationType(AtomicReferenceApplyCodec.REQUEST_TYPE)
        msg.setRetryable(AtomicReferenceApplyCodec.RETRYABLE)
        msg.set(name)
        msg.set(function)
        msg.updateSize()
        return msg

    @classmethod
    def decodeRequest(cls, clientMessage):
        """Extract the apply request parameters from *clientMessage*."""
        params = AtomicReferenceApplyCodec.RequestParameters()
        params.name = clientMessage.extractStringFromPayload()
        params.function = clientMessage.extractBytesFromPayload()
        return params

    # ------------------------------ RESPONSE -----------------------------
    class ResponseParameters:
        def __init__(self):
            self.TYPE = AtomicReferenceApplyCodec.RESPONSE_TYPE
            self.response = None

    @classmethod
    def encodeResponse(cls, response):
        """Serialize the nullable apply response."""
        msg = ClientMessage()
        msg.setOperationType(AtomicReferenceApplyCodec.RESPONSE_TYPE)
        # A leading boolean flags whether the payload is null.
        if response is None:
            msg.set(True)
        else:
            msg.set(False)
            msg.set(response)
        return msg

    @classmethod
    def decodeResponse(cls, clientMessage):
        """Extract the nullable apply response from *clientMessage*."""
        params = AtomicReferenceApplyCodec.ResponseParameters()
        if not clientMessage.extractBooleanFromPayload():
            params.response = clientMessage.extractBytesFromPayload()
        return params
class AtomicReferenceClearCodec:
    """Codec for the AtomicReference clear operation (no response payload)."""
    REQUEST_TYPE = AtomicReferenceMessageType.ATOMICREFERENCE_CLEAR
    RESPONSE_TYPE = 100
    RETRYABLE = False

    # ------------------------------ REQUEST ------------------------------
    class RequestParameters:
        def __init__(self):
            self.TYPE = AtomicReferenceClearCodec.REQUEST_TYPE
            self.name = None

    @classmethod
    def encodeRequest(cls, name):
        """Serialize a clear request into a ClientMessage."""
        msg = ClientMessage()
        msg.setOperationType(AtomicReferenceClearCodec.REQUEST_TYPE)
        msg.setRetryable(AtomicReferenceClearCodec.RETRYABLE)
        msg.set(name)
        msg.updateSize()
        return msg

    @classmethod
    def decodeRequest(cls, clientMessage):
        """Extract the clear request parameters from *clientMessage*."""
        params = AtomicReferenceClearCodec.RequestParameters()
        params.name = clientMessage.extractStringFromPayload()
        return params

    # ------------------------------ RESPONSE -----------------------------
    class ResponseParameters:
        def __init__(self):
            self.TYPE = AtomicReferenceClearCodec.RESPONSE_TYPE

    @classmethod
    def encodeResponse(cls):
        """Serialize the (empty) clear response."""
        msg = ClientMessage()
        msg.setOperationType(AtomicReferenceClearCodec.RESPONSE_TYPE)
        return msg

    @classmethod
    def decodeResponse(cls, clientMessage):
        """The clear response carries no payload fields."""
        return AtomicReferenceClearCodec.ResponseParameters()
class AtomicReferenceCompareAndSetCodec:
    """Codec for the AtomicReference compareAndSet operation.

    The request carries the name plus two nullable blobs (expected / updated);
    the response is a plain boolean success flag.
    """
    REQUEST_TYPE = AtomicReferenceMessageType.ATOMICREFERENCE_COMPAREANDSET
    RESPONSE_TYPE = 101
    RETRYABLE = False

    # ------------------------------ REQUEST ------------------------------
    class RequestParameters:
        def __init__(self):
            self.TYPE = AtomicReferenceCompareAndSetCodec.REQUEST_TYPE
            self.name = None
            self.expected = None
            self.updated = None

    @classmethod
    def encodeRequest(cls, name, expected, updated):
        """Serialize a compareAndSet request into a ClientMessage."""
        msg = ClientMessage()
        msg.setOperationType(AtomicReferenceCompareAndSetCodec.REQUEST_TYPE)
        msg.setRetryable(AtomicReferenceCompareAndSetCodec.RETRYABLE)
        msg.set(name)
        # Each nullable field is preceded by an is-null boolean flag.
        if expected is None:
            msg.set(True)
        else:
            msg.set(False)
            msg.set(expected)
        if updated is None:
            msg.set(True)
        else:
            msg.set(False)
            msg.set(updated)
        msg.updateSize()
        return msg

    @classmethod
    def decodeRequest(cls, clientMessage):
        """Extract the compareAndSet request parameters from *clientMessage*."""
        params = AtomicReferenceCompareAndSetCodec.RequestParameters()
        params.name = clientMessage.extractStringFromPayload()
        if not clientMessage.extractBooleanFromPayload():
            params.expected = clientMessage.extractBytesFromPayload()
        if not clientMessage.extractBooleanFromPayload():
            params.updated = clientMessage.extractBytesFromPayload()
        return params

    # ------------------------------ RESPONSE -----------------------------
    class ResponseParameters:
        def __init__(self):
            self.TYPE = AtomicReferenceCompareAndSetCodec.RESPONSE_TYPE
            self.response = None

    @classmethod
    def encodeResponse(cls, response):
        """Serialize the boolean compareAndSet response."""
        msg = ClientMessage()
        msg.setOperationType(AtomicReferenceCompareAndSetCodec.RESPONSE_TYPE)
        msg.set(response)
        return msg

    @classmethod
    def decodeResponse(cls, clientMessage):
        """Extract the boolean compareAndSet response from *clientMessage*."""
        params = AtomicReferenceCompareAndSetCodec.ResponseParameters()
        params.response = clientMessage.extractBooleanFromPayload()
        return params
class AtomicReferenceContainsCodec:
    """Codec for the AtomicReference contains operation (boolean response)."""
    REQUEST_TYPE = AtomicReferenceMessageType.ATOMICREFERENCE_CONTAINS
    RESPONSE_TYPE = 101
    RETRYABLE = True

    # ------------------------------ REQUEST ------------------------------
    class RequestParameters:
        def __init__(self):
            self.TYPE = AtomicReferenceContainsCodec.REQUEST_TYPE
            self.name = None
            self.expected = None

    @classmethod
    def encodeRequest(cls, name, expected):
        """Serialize a contains request into a ClientMessage."""
        msg = ClientMessage()
        msg.setOperationType(AtomicReferenceContainsCodec.REQUEST_TYPE)
        msg.setRetryable(AtomicReferenceContainsCodec.RETRYABLE)
        msg.set(name)
        # The nullable expected value is preceded by an is-null boolean flag.
        if expected is None:
            msg.set(True)
        else:
            msg.set(False)
            msg.set(expected)
        msg.updateSize()
        return msg

    @classmethod
    def decodeRequest(cls, clientMessage):
        """Extract the contains request parameters from *clientMessage*."""
        params = AtomicReferenceContainsCodec.RequestParameters()
        params.name = clientMessage.extractStringFromPayload()
        if not clientMessage.extractBooleanFromPayload():
            params.expected = clientMessage.extractBytesFromPayload()
        return params

    # ------------------------------ RESPONSE -----------------------------
    class ResponseParameters:
        def __init__(self):
            self.TYPE = AtomicReferenceContainsCodec.RESPONSE_TYPE
            self.response = None

    @classmethod
    def encodeResponse(cls, response):
        """Serialize the boolean contains response."""
        msg = ClientMessage()
        msg.setOperationType(AtomicReferenceContainsCodec.RESPONSE_TYPE)
        msg.set(response)
        return msg

    @classmethod
    def decodeResponse(cls, clientMessage):
        """Extract the boolean contains response from *clientMessage*."""
        params = AtomicReferenceContainsCodec.ResponseParameters()
        params.response = clientMessage.extractBooleanFromPayload()
        return params
class AtomicReferenceGetAndAlterCodec:
    """Codec for the AtomicReference getAndAlter operation (nullable response)."""
    REQUEST_TYPE = AtomicReferenceMessageType.ATOMICREFERENCE_GETANDALTER
    RESPONSE_TYPE = 105
    RETRYABLE = False

    # ------------------------------ REQUEST ------------------------------
    class RequestParameters:
        def __init__(self):
            self.TYPE = AtomicReferenceGetAndAlterCodec.REQUEST_TYPE
            self.name = None
            self.function = None

    @classmethod
    def encodeRequest(cls, name, function):
        """Serialize a getAndAlter request into a ClientMessage."""
        msg = ClientMessage()
        msg.setOperationType(AtomicReferenceGetAndAlterCodec.REQUEST_TYPE)
        msg.setRetryable(AtomicReferenceGetAndAlterCodec.RETRYABLE)
        msg.set(name)
        msg.set(function)
        msg.updateSize()
        return msg

    @classmethod
    def decodeRequest(cls, clientMessage):
        """Extract the getAndAlter request parameters from *clientMessage*."""
        params = AtomicReferenceGetAndAlterCodec.RequestParameters()
        params.name = clientMessage.extractStringFromPayload()
        params.function = clientMessage.extractBytesFromPayload()
        return params

    # ------------------------------ RESPONSE -----------------------------
    class ResponseParameters:
        def __init__(self):
            self.TYPE = AtomicReferenceGetAndAlterCodec.RESPONSE_TYPE
            self.response = None

    @classmethod
    def encodeResponse(cls, response):
        """Serialize the nullable getAndAlter response."""
        msg = ClientMessage()
        msg.setOperationType(AtomicReferenceGetAndAlterCodec.RESPONSE_TYPE)
        # A leading boolean flags whether the payload is null.
        if response is None:
            msg.set(True)
        else:
            msg.set(False)
            msg.set(response)
        return msg

    @classmethod
    def decodeResponse(cls, clientMessage):
        """Extract the nullable getAndAlter response from *clientMessage*."""
        params = AtomicReferenceGetAndAlterCodec.ResponseParameters()
        if not clientMessage.extractBooleanFromPayload():
            params.response = clientMessage.extractBytesFromPayload()
        return params
class AtomicReferenceGetAndSetCodec:
    """Codec for the AtomicReference getAndSet operation.

    Both the new value in the request and the previous value in the
    response are nullable blobs flagged by a leading boolean.
    """
    REQUEST_TYPE = AtomicReferenceMessageType.ATOMICREFERENCE_GETANDSET
    RESPONSE_TYPE = 105
    RETRYABLE = False

    # ------------------------------ REQUEST ------------------------------
    class RequestParameters:
        def __init__(self):
            self.TYPE = AtomicReferenceGetAndSetCodec.REQUEST_TYPE
            self.name = None
            self.newValue = None

    @classmethod
    def encodeRequest(cls, name, newValue):
        """Serialize a getAndSet request into a ClientMessage."""
        msg = ClientMessage()
        msg.setOperationType(AtomicReferenceGetAndSetCodec.REQUEST_TYPE)
        msg.setRetryable(AtomicReferenceGetAndSetCodec.RETRYABLE)
        msg.set(name)
        if newValue is None:
            msg.set(True)
        else:
            msg.set(False)
            msg.set(newValue)
        msg.updateSize()
        return msg

    @classmethod
    def decodeRequest(cls, clientMessage):
        """Extract the getAndSet request parameters from *clientMessage*."""
        params = AtomicReferenceGetAndSetCodec.RequestParameters()
        params.name = clientMessage.extractStringFromPayload()
        if not clientMessage.extractBooleanFromPayload():
            params.newValue = clientMessage.extractBytesFromPayload()
        return params

    # ------------------------------ RESPONSE -----------------------------
    class ResponseParameters:
        def __init__(self):
            self.TYPE = AtomicReferenceGetAndSetCodec.RESPONSE_TYPE
            self.response = None

    @classmethod
    def encodeResponse(cls, response):
        """Serialize the nullable getAndSet response."""
        msg = ClientMessage()
        msg.setOperationType(AtomicReferenceGetAndSetCodec.RESPONSE_TYPE)
        if response is None:
            msg.set(True)
        else:
            msg.set(False)
            msg.set(response)
        return msg

    @classmethod
    def decodeResponse(cls, clientMessage):
        """Extract the nullable getAndSet response from *clientMessage*."""
        params = AtomicReferenceGetAndSetCodec.ResponseParameters()
        if not clientMessage.extractBooleanFromPayload():
            params.response = clientMessage.extractBytesFromPayload()
        return params
class AtomicReferenceGetCodec:
    """Codec for the AtomicReference get operation (nullable response)."""
    REQUEST_TYPE = AtomicReferenceMessageType.ATOMICREFERENCE_GET
    RESPONSE_TYPE = 105
    RETRYABLE = True

    # ------------------------------ REQUEST ------------------------------
    class RequestParameters:
        def __init__(self):
            self.TYPE = AtomicReferenceGetCodec.REQUEST_TYPE
            self.name = None

    @classmethod
    def encodeRequest(cls, name):
        """Serialize a get request into a ClientMessage."""
        msg = ClientMessage()
        msg.setOperationType(AtomicReferenceGetCodec.REQUEST_TYPE)
        msg.setRetryable(AtomicReferenceGetCodec.RETRYABLE)
        msg.set(name)
        msg.updateSize()
        return msg

    @classmethod
    def decodeRequest(cls, clientMessage):
        """Extract the get request parameters from *clientMessage*."""
        params = AtomicReferenceGetCodec.RequestParameters()
        params.name = clientMessage.extractStringFromPayload()
        return params

    # ------------------------------ RESPONSE -----------------------------
    class ResponseParameters:
        def __init__(self):
            self.TYPE = AtomicReferenceGetCodec.RESPONSE_TYPE
            self.response = None

    @classmethod
    def encodeResponse(cls, response):
        """Serialize the nullable get response."""
        msg = ClientMessage()
        msg.setOperationType(AtomicReferenceGetCodec.RESPONSE_TYPE)
        # A leading boolean flags whether the payload is null.
        if response is None:
            msg.set(True)
        else:
            msg.set(False)
            msg.set(response)
        return msg

    @classmethod
    def decodeResponse(cls, clientMessage):
        """Extract the nullable get response from *clientMessage*."""
        params = AtomicReferenceGetCodec.ResponseParameters()
        if not clientMessage.extractBooleanFromPayload():
            params.response = clientMessage.extractBytesFromPayload()
        return params
class AtomicReferenceIsNullCodec:
    """Codec for the AtomicReference isNull operation (boolean response)."""
    REQUEST_TYPE = AtomicReferenceMessageType.ATOMICREFERENCE_ISNULL
    RESPONSE_TYPE = 101
    RETRYABLE = True

    # ------------------------------ REQUEST ------------------------------
    class RequestParameters:
        def __init__(self):
            self.TYPE = AtomicReferenceIsNullCodec.REQUEST_TYPE
            self.name = None

    @classmethod
    def encodeRequest(cls, name):
        """Serialize an isNull request into a ClientMessage."""
        msg = ClientMessage()
        msg.setOperationType(AtomicReferenceIsNullCodec.REQUEST_TYPE)
        msg.setRetryable(AtomicReferenceIsNullCodec.RETRYABLE)
        msg.set(name)
        msg.updateSize()
        return msg

    @classmethod
    def decodeRequest(cls, clientMessage):
        """Extract the isNull request parameters from *clientMessage*."""
        params = AtomicReferenceIsNullCodec.RequestParameters()
        params.name = clientMessage.extractStringFromPayload()
        return params

    # ------------------------------ RESPONSE -----------------------------
    class ResponseParameters:
        def __init__(self):
            self.TYPE = AtomicReferenceIsNullCodec.RESPONSE_TYPE
            self.response = None

    @classmethod
    def encodeResponse(cls, response):
        """Serialize the boolean isNull response."""
        msg = ClientMessage()
        msg.setOperationType(AtomicReferenceIsNullCodec.RESPONSE_TYPE)
        msg.set(response)
        return msg

    @classmethod
    def decodeResponse(cls, clientMessage):
        """Extract the boolean isNull response from *clientMessage*."""
        params = AtomicReferenceIsNullCodec.ResponseParameters()
        params.response = clientMessage.extractBooleanFromPayload()
        return params
class AtomicReferenceMessageType:
    """Operation codes (message type ids) for AtomicReference client messages."""
    ATOMICREFERENCE_APPLY = 0x0b01
    ATOMICREFERENCE_ALTER = 0x0b02
    ATOMICREFERENCE_ALTERANDGET = 0x0b03
    ATOMICREFERENCE_GETANDALTER = 0x0b04
    ATOMICREFERENCE_CONTAINS = 0x0b05
    ATOMICREFERENCE_COMPAREANDSET = 0x0b06
    ATOMICREFERENCE_GET = 0x0b08
    ATOMICREFERENCE_SET = 0x0b09
    ATOMICREFERENCE_CLEAR = 0x0b0a
    ATOMICREFERENCE_GETANDSET = 0x0b0b
    ATOMICREFERENCE_SETANDGET = 0x0b0c
    ATOMICREFERENCE_ISNULL = 0x0b0d

    def __init__(self, messageType):
        # BUG FIX: this was spelled ___init__ (three leading underscores),
        # so the constructor never ran and instances never got an .id.
        self.id = messageType

    # NOTE(review): the instance attribute `self.id` assigned in __init__
    # shadows this method on initialized instances; the accessor is kept
    # only for interface compatibility with the original generated code.
    def id(self):
        return self.id
class AtomicReferenceSetAndGetCodec:
    """Codec for the AtomicReference setAndGet operation.

    Both the new value in the request and the value in the response are
    nullable blobs flagged by a leading boolean.
    """
    REQUEST_TYPE = AtomicReferenceMessageType.ATOMICREFERENCE_SETANDGET
    RESPONSE_TYPE = 105
    RETRYABLE = False

    # ------------------------------ REQUEST ------------------------------
    class RequestParameters:
        def __init__(self):
            self.TYPE = AtomicReferenceSetAndGetCodec.REQUEST_TYPE
            self.name = None
            self.newValue = None

    @classmethod
    def encodeRequest(cls, name, newValue):
        """Serialize a setAndGet request into a ClientMessage."""
        msg = ClientMessage()
        msg.setOperationType(AtomicReferenceSetAndGetCodec.REQUEST_TYPE)
        msg.setRetryable(AtomicReferenceSetAndGetCodec.RETRYABLE)
        msg.set(name)
        if newValue is None:
            msg.set(True)
        else:
            msg.set(False)
            msg.set(newValue)
        msg.updateSize()
        return msg

    @classmethod
    def decodeRequest(cls, clientMessage):
        """Extract the setAndGet request parameters from *clientMessage*."""
        params = AtomicReferenceSetAndGetCodec.RequestParameters()
        params.name = clientMessage.extractStringFromPayload()
        if not clientMessage.extractBooleanFromPayload():
            params.newValue = clientMessage.extractBytesFromPayload()
        return params

    # ------------------------------ RESPONSE -----------------------------
    class ResponseParameters:
        def __init__(self):
            self.TYPE = AtomicReferenceSetAndGetCodec.RESPONSE_TYPE
            self.response = None

    @classmethod
    def encodeResponse(cls, response):
        """Serialize the nullable setAndGet response."""
        msg = ClientMessage()
        msg.setOperationType(AtomicReferenceSetAndGetCodec.RESPONSE_TYPE)
        if response is None:
            msg.set(True)
        else:
            msg.set(False)
            msg.set(response)
        return msg

    @classmethod
    def decodeResponse(cls, clientMessage):
        """Extract the nullable setAndGet response from *clientMessage*."""
        params = AtomicReferenceSetAndGetCodec.ResponseParameters()
        if not clientMessage.extractBooleanFromPayload():
            params.response = clientMessage.extractBytesFromPayload()
        return params
class AtomicReferenceSetCodec:
    """Codec for the AtomicReference set operation (no response payload)."""
    REQUEST_TYPE = AtomicReferenceMessageType.ATOMICREFERENCE_SET
    RESPONSE_TYPE = 100
    RETRYABLE = False

    # ------------------------------ REQUEST ------------------------------
    class RequestParameters:
        def __init__(self):
            self.TYPE = AtomicReferenceSetCodec.REQUEST_TYPE
            self.name = None
            self.newValue = None

    @classmethod
    def encodeRequest(cls, name, newValue):
        """Serialize a set request into a ClientMessage."""
        msg = ClientMessage()
        msg.setOperationType(AtomicReferenceSetCodec.REQUEST_TYPE)
        msg.setRetryable(AtomicReferenceSetCodec.RETRYABLE)
        msg.set(name)
        # The nullable new value is preceded by an is-null boolean flag.
        if newValue is None:
            msg.set(True)
        else:
            msg.set(False)
            msg.set(newValue)
        msg.updateSize()
        return msg

    @classmethod
    def decodeRequest(cls, clientMessage):
        """Extract the set request parameters from *clientMessage*."""
        params = AtomicReferenceSetCodec.RequestParameters()
        params.name = clientMessage.extractStringFromPayload()
        if not clientMessage.extractBooleanFromPayload():
            params.newValue = clientMessage.extractBytesFromPayload()
        return params

    # ------------------------------ RESPONSE -----------------------------
    class ResponseParameters:
        def __init__(self):
            self.TYPE = AtomicReferenceSetCodec.RESPONSE_TYPE

    @classmethod
    def encodeResponse(cls):
        """Serialize the (empty) set response."""
        msg = ClientMessage()
        msg.setOperationType(AtomicReferenceSetCodec.RESPONSE_TYPE)
        return msg

    @classmethod
    def decodeResponse(cls, clientMessage):
        """The set response carries no payload fields."""
        return AtomicReferenceSetCodec.ResponseParameters()
| 34.377721
| 87
| 0.660174
| 1,922
| 26,849
| 9.075963
| 0.044745
| 0.050447
| 0.015765
| 0.020637
| 0.805263
| 0.758542
| 0.720936
| 0.715891
| 0.711419
| 0.711419
| 0
| 0.00391
| 0.23796
| 26,849
| 780
| 88
| 34.421795
| 0.84868
| 0
| 0
| 0.779503
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002846
| 0
| 0
| 1
| 0.114907
| false
| 0
| 0.006211
| 0.001553
| 0.329193
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
528071058e813a05f987f47081622d3eef45c329
| 10,518
|
py
|
Python
|
DemoUnit/tests/__init__.py
|
appukuttan-shailesh/DemoUnit
|
afbd2ad27433288582a98936d14d778c66f1038e
|
[
"BSD-3-Clause"
] | null | null | null |
DemoUnit/tests/__init__.py
|
appukuttan-shailesh/DemoUnit
|
afbd2ad27433288582a98936d14d778c66f1038e
|
[
"BSD-3-Clause"
] | null | null | null |
DemoUnit/tests/__init__.py
|
appukuttan-shailesh/DemoUnit
|
afbd2ad27433288582a98936d14d778c66f1038e
|
[
"BSD-3-Clause"
] | null | null | null |
import efel
import numpy
import sciunit
import DemoUnit.capabilities as cap
from typing import Dict, Optional
#===============================================================================
class RestingPotential(sciunit.Test):
    """Test the cell's resting membrane potential."""

    # Type of score returned by the test.
    score_type: sciunit.scores = sciunit.scores.ZScore

    # Brief description of the test objective.
    description: str = ("Test the cell's resting membrane potential")

    def __init__(self,
                 observation: Optional[Dict[str, float]] = None,
                 name: str = "Resting Membrane Potential Test") -> None:
        # BUG FIX: the default observation was a mutable dict shared across
        # all calls; use None and build a fresh dict per instantiation.
        if observation is None:
            observation = {'mean': None, 'std': None}
        self.required_capabilities += (cap.SomaProducesMembranePotential,)
        sciunit.Test.__init__(self, observation, name)

    # ----------------------------------------------------------------------
    def validate_observation(self, observation: Dict[str, float]) -> None:
        """Raise ObservationError unless observation is {'mean': num, 'std': num}."""
        try:
            assert len(observation.keys()) == 2
            for key, val in observation.items():
                assert key in ["mean", "std"]
                assert (isinstance(val, int) or isinstance(val, float))
        except Exception:
            raise sciunit.errors.ObservationError(
                ("Observation must return a dictionary of the form:"
                 "{'mean': NUM1, 'std': NUM2}"))

    # ----------------------------------------------------------------------
    def generate_prediction(self, model: sciunit.Model, verbose: bool = False) -> float:
        """Predict the resting potential as the median somatic voltage."""
        trace = model.get_soma_membrane_potential(tstop=50.0)
        prediction = numpy.median(trace[1])
        return prediction

    # ----------------------------------------------------------------------
    def compute_score(self, observation: Dict[str, float], prediction: float, verbose: bool = False) -> sciunit.scores.ZScore:
        """Compare observation and prediction as a Z-score."""
        score = sciunit.scores.ZScore.compute(observation, prediction)
        return score
#===============================================================================
class InputResistance(sciunit.Test):
    """Test the cell's input resistance."""

    # Type of score returned by the test.
    score_type: sciunit.scores = sciunit.scores.ZScore

    # Brief description of the test objective.
    description = ("Test the cell's input resistance")

    def __init__(self,
                 observation: Optional[Dict[str, float]] = None,
                 name: str = "Input Resistance Test") -> None:
        # BUG FIX: the default observation was a mutable dict shared across
        # all calls; use None and build a fresh dict per instantiation.
        if observation is None:
            observation = {'mean': None, 'std': None}
        self.required_capabilities += (cap.SomaReceivesStepCurrent, cap.SomaProducesMembranePotential)
        sciunit.Test.__init__(self, observation, name)

    # ----------------------------------------------------------------------
    def validate_observation(self, observation: Dict[str, float]) -> None:
        """Raise ObservationError unless observation is {'mean': num, 'std': num}."""
        try:
            assert len(observation.keys()) == 2
            for key, val in observation.items():
                assert key in ["mean", "std"]
                assert (isinstance(val, int) or isinstance(val, float))
        except Exception:
            raise sciunit.errors.ObservationError(
                ("Observation must return a dictionary of the form:"
                 "{'mean': NUM1, 'std': NUM2}"))

    # ----------------------------------------------------------------------
    def generate_prediction(self, model: sciunit.Model, verbose: bool = False) -> float:
        """Predict input resistance from a hyperpolarizing step current via eFEL."""
        efel.reset()
        stim_start = 10.0  # ms
        stim_dur = 50.0    # ms
        stim_amp = -1.0    # nA
        efel.setDoubleSetting('stimulus_current', stim_amp)
        model.inject_soma_square_current(current={'delay': stim_start,
                                                  'duration': stim_dur,
                                                  'amplitude': stim_amp})
        trace = model.get_soma_membrane_potential_eFEL_format(tstop=stim_start+stim_dur+stim_start,
                                                              start=stim_start,
                                                              stop=stim_start+stim_dur)
        prediction = efel.getFeatureValues([trace], ['ohmic_input_resistance_vb_ssse'])[0]["ohmic_input_resistance_vb_ssse"][0]
        return prediction

    # ----------------------------------------------------------------------
    def compute_score(self, observation: Dict[str, float], prediction: float, verbose: bool = False) -> sciunit.scores.ZScore:
        """Compare observation and prediction as a Z-score."""
        score = sciunit.scores.ZScore.compute(observation, prediction)
        return score
#===============================================================================
class AP_Height(sciunit.Test):
    """Test the cell's action potential (AP) height."""

    # Type of score returned by the test.
    score_type: sciunit.scores = sciunit.scores.ZScore
    # Brief description of the test objective.
    description = "Test the cell's AP height"

    def __init__(self,
                 observation: Dict[str, float] = {'mean': None, 'std': None},
                 name: str = "Action Potential Height Test") -> None:
        """observation is the experimental reference {'mean': NUM1, 'std': NUM2}."""
        # NOTE(review): the mutable dict default is kept for interface
        # compatibility; it is only read here, never mutated.
        self.required_capabilities += (cap.SomaReceivesStepCurrent, cap.SomaProducesMembranePotential)
        sciunit.Test.__init__(self, observation, name)

    def validate_observation(self, observation: Dict[str, float]) -> None:
        """Raise ObservationError unless observation is
        {'mean': NUM1, 'std': NUM2} with numeric values."""
        # Explicit checks instead of `assert` inside a broad try/except:
        # asserts are stripped under -O and the old `except Exception`
        # also swallowed unrelated errors.
        valid = (isinstance(observation, dict)
                 and set(observation.keys()) == {"mean", "std"}
                 and all(isinstance(val, (int, float))
                         for val in observation.values()))
        if not valid:
            raise sciunit.errors.ObservationError(
                ("Observation must return a dictionary of the form:"
                 "{'mean': NUM1, 'std': NUM2}"))

    def generate_prediction(self, model: sciunit.Model, verbose: bool = False) -> Optional[float]:
        """Inject a 15 nA, 5 ms depolarizing step at the soma and return
        eFEL's 'AP_amplitude' for the first spike, or None if no AP fired."""
        efel.reset()
        stim_start = 10.0  # ms
        stim_dur = 5.0     # ms
        stim_amp = 15.0    # nA
        efel.setDoubleSetting('stimulus_current', stim_amp)
        model.inject_soma_square_current(current={'delay': stim_start,
                                                  'duration': stim_dur,
                                                  'amplitude': stim_amp})
        trace = model.get_soma_membrane_potential_eFEL_format(tstop=stim_start + stim_dur + stim_start,
                                                              start=stim_start,
                                                              stop=stim_start + stim_dur)
        output = efel.getFeatureValues([trace], ['AP_amplitude'])[0]["AP_amplitude"]
        # eFEL may return None or a numpy array; `if output` would raise
        # "truth value of an array is ambiguous" for multi-spike arrays,
        # so test presence and length explicitly.
        prediction = output[0] if output is not None and len(output) > 0 else None
        return prediction

    def compute_score(self, observation: Dict[str, float], prediction: Optional[float], verbose: bool = False) -> sciunit.scores.ZScore:
        """Return the Z-score of the prediction, or a sentinel score when
        no AP was detected."""
        if prediction is None:
            # No AP detected: a sentinel Z-score is used instead of
            # InsufficientDataScore; see
            # https://github.com/HumanBrainProject/hbp-validation-framework/issues/253
            score = sciunit.scores.ZScore(9999.99)
        else:
            score = sciunit.scores.ZScore.compute(observation, prediction)
        return score
#===============================================================================
class AP_HalfWidth(sciunit.Test):
    """Test the cell's action potential (AP) half-width."""

    # Type of score returned by the test.
    score_type: sciunit.scores = sciunit.scores.ZScore
    # Brief description of the test objective.
    description = "Test the cell's AP half-width"

    def __init__(self,
                 observation: Dict[str, float] = {'mean': None, 'std': None},
                 name: str = "Action Potential Half-Width Test") -> None:
        """observation is the experimental reference {'mean': NUM1, 'std': NUM2}."""
        # NOTE(review): the mutable dict default is kept for interface
        # compatibility; it is only read here, never mutated.
        self.required_capabilities += (cap.SomaReceivesStepCurrent, cap.SomaProducesMembranePotential)
        sciunit.Test.__init__(self, observation, name)

    def validate_observation(self, observation: Dict[str, float]) -> None:
        """Raise ObservationError unless observation is
        {'mean': NUM1, 'std': NUM2} with numeric values."""
        # Explicit checks instead of `assert` inside a broad try/except:
        # asserts are stripped under -O and the old `except Exception`
        # also swallowed unrelated errors.
        valid = (isinstance(observation, dict)
                 and set(observation.keys()) == {"mean", "std"}
                 and all(isinstance(val, (int, float))
                         for val in observation.values()))
        if not valid:
            raise sciunit.errors.ObservationError(
                ("Observation must return a dictionary of the form:"
                 "{'mean': NUM1, 'std': NUM2}"))

    def generate_prediction(self, model: sciunit.Model, verbose: bool = False) -> Optional[float]:
        """Inject a 15 nA, 5 ms depolarizing step at the soma and return
        eFEL's 'AP_duration_half_width' for the first spike, or None if
        no AP fired."""
        efel.reset()
        stim_start = 10.0  # ms
        stim_dur = 5.0     # ms
        stim_amp = 15.0    # nA
        efel.setDoubleSetting('stimulus_current', stim_amp)
        model.inject_soma_square_current(current={'delay': stim_start,
                                                  'duration': stim_dur,
                                                  'amplitude': stim_amp})
        trace = model.get_soma_membrane_potential_eFEL_format(tstop=stim_start + stim_dur + stim_start,
                                                              start=stim_start,
                                                              stop=stim_start + stim_dur)
        output = efel.getFeatureValues([trace], ['AP_duration_half_width'])[0]["AP_duration_half_width"]
        # eFEL may return None or a numpy array; `if output` would raise
        # "truth value of an array is ambiguous" for multi-spike arrays,
        # so test presence and length explicitly.
        prediction = output[0] if output is not None and len(output) > 0 else None
        return prediction

    def compute_score(self, observation: Dict[str, float], prediction: Optional[float], verbose: bool = False) -> sciunit.scores.ZScore:
        """Return the Z-score of the prediction, or a sentinel score when
        no AP was detected."""
        if prediction is None:
            # No AP detected: a sentinel Z-score is used instead of
            # InsufficientDataScore; see
            # https://github.com/HumanBrainProject/hbp-validation-framework/issues/253
            score = sciunit.scores.ZScore(9999.99)
        else:
            score = sciunit.scores.ZScore.compute(observation, prediction)
        return score
| 46.955357
| 136
| 0.527762
| 984
| 10,518
| 5.5
| 0.138211
| 0.048041
| 0.04915
| 0.04878
| 0.936807
| 0.936807
| 0.900591
| 0.874723
| 0.874723
| 0.874723
| 0
| 0.008287
| 0.265735
| 10,518
| 224
| 137
| 46.955357
| 0.692477
| 0.167047
| 0
| 0.825175
| 0
| 0
| 0.101287
| 0.012511
| 0
| 0
| 0
| 0
| 0.083916
| 1
| 0.111888
| false
| 0
| 0.034965
| 0
| 0.286713
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
871fe2f86e8994c445929e18cda032ebe0962403
| 33,242
|
py
|
Python
|
Battleship Game/sprites.py
|
ntdai95/Personal-Projects
|
1168a41a4fca19218b5be42ae756e8ef6bac9895
|
[
"MIT"
] | 3
|
2021-06-19T04:42:00.000Z
|
2021-09-08T06:18:21.000Z
|
BattleshipGame (Python)/sprites.py
|
ntdai95-1565490/Personal-Projects
|
4c66e60760293bd3497c0c34cda508fdabf37b89
|
[
"MIT"
] | null | null | null |
BattleshipGame (Python)/sprites.py
|
ntdai95-1565490/Personal-Projects
|
4c66e60760293bd3497c0c34cda508fdabf37b89
|
[
"MIT"
] | null | null | null |
import pygame as pg
from os import path
from settings import *
class Cruiser(pg.sprite.Sprite):
    """Sprite for the cruiser (its cells are marked with value 5 in the grids).

    Handles drag-and-drop placement and rotation before the battle, and
    repositioning over the player's / computer's board during the battle.
    """
    def __init__(self):
        super().__init__()
        # Load the ship image from the Images folder; white is transparent.
        self.image = pg.image.load(path.join(path.join(path.dirname(__file__), "Images"), "cruiser.png")).convert()
        self.image.set_colorkey(WHITE)
        self.rect = self.image.get_rect()
        self.rect.center = [1100, 135]  # default dock position (horizontal)
        self.position = "horizontal"  # current on-screen orientation
        self.position_computer = None  # orientation used while drawn on the computer board
    def update(self, rotate_ship, selected_ship, mouse_position, player_grid, game_battle_running, computer_turn, missile_position_y, missile_on, computer_grid, computer_ships_sunk):
        """Per-frame placement / rotation / display logic for the cruiser.

        player_grid / computer_grid appear to be 11x11 grids whose cells
        contain 5 where the cruiser lies -- TODO confirm against the
        game module that builds them.
        """
        # Placement phase: a rotation was requested, or the ship sits
        # (partly) outside the area of the board where it fits.
        if game_battle_running == False and computer_turn == False and (rotate_ship == "cruiser" or (self.position == "horizontal" and (TABLE_POSITION_X + 2 * CELL_SIZE > self.rect.center[0] or self.rect.center[0] > TABLE_POSITION_X - 2 * CELL_SIZE + TABLE_SIZE \
                or TABLE_POSITION_Y > self.rect.center[1] or self.rect.center[1] > TABLE_POSITION_Y + TABLE_SIZE)) or (self.position == "vertical" and (TABLE_POSITION_X > self.rect.center[0] \
                or self.rect.center[0] > TABLE_POSITION_X + TABLE_SIZE or TABLE_POSITION_Y + 2 * CELL_SIZE > self.rect.center[1] or self.rect.center[1] > TABLE_POSITION_Y - 2 * CELL_SIZE + TABLE_SIZE))):
            # Clear any previously recorded cruiser cells from the player grid.
            for row_index in range(11):
                for column_index in range(11):
                    if player_grid[row_index][column_index] == 5:
                        player_grid[row_index][column_index] = 0
            # Toggle orientation and move back to the matching dock spot.
            if rotate_ship == "cruiser" and self.position == "horizontal":
                self.position = "vertical"
                self.image = pg.transform.rotate(self.image, 90)
                self.rect = self.image.get_rect(center = [1450, 250])
            elif rotate_ship == "cruiser" and self.position == "vertical":
                self.position = "horizontal"
                self.image = pg.transform.rotate(self.image, 270)
                self.rect = self.image.get_rect(center = [1100, 135])
        # Ship is placed on a grid and is not being dragged or rotated.
        elif selected_ship != "cruiser" and any(5 in row for row in player_grid) and rotate_ship != "cruiser":
            if game_battle_running and computer_turn == False:
                # Battle, player's turn: draw the cruiser over the
                # computer's board (visible only once sunk).
                list_of_cells = []
                for row_index in range(11):
                    for column_index in range(11):
                        if computer_grid[row_index][column_index] == 5:
                            list_of_cells.append([column_index, row_index])
                center_cell = list_of_cells[2]
                if 5 not in computer_ships_sunk:
                    # Not sunk yet: keep the sprite shifted off-screen
                    # (SCREEN_WIDTH subtracted from the x coordinate).
                    self.rect.center = tuple([center_cell[0] * CELL_SIZE + CELL_SIZE / 2 - SCREEN_WIDTH + TABLE_POSITION_X_MAIN_BATTLE, center_cell[1] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_Y])
                elif center_cell[0] == list_of_cells[1][0]:
                    # Cells share a grid column: rotate once so the sprite
                    # matches, then place it (note swapped cell indices).
                    if self.position == "vertical" and self.position_computer == None:
                        self.image = pg.transform.rotate(self.image, 270)
                        self.rect = self.image.get_rect(center = [1100, 135])
                        self.position_computer = "horizontal"
                    self.rect.center = tuple([center_cell[1] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_X_MAIN_BATTLE, center_cell[0] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_Y])
                else:
                    # Other alignment: rotate once the other way.
                    if self.position == "horizontal" and self.position_computer == None:
                        self.image = pg.transform.rotate(self.image, 90)
                        self.rect = self.image.get_rect(center = [1450, 250])
                        self.position_computer = "vertical"
                    self.rect.center = tuple([center_cell[1] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_X_MAIN_BATTLE, center_cell[0] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_Y])
            else:
                # Draw the cruiser on the player's own board.
                list_of_cells = []
                for row_index in range(11):
                    for column_index in range(11):
                        if player_grid[row_index][column_index] == 5:
                            list_of_cells.append([column_index, row_index])
                center_cell = list_of_cells[2]
                if game_battle_running:
                    # Undo any rotation applied for the computer-board view.
                    if self.position == "horizontal" and self.position_computer == "vertical":
                        self.image = pg.transform.rotate(self.image, 270)
                        self.rect = self.image.get_rect(center = [1100, 135])
                        self.position_computer = None
                    elif self.position == "vertical" and self.position_computer == "horizontal":
                        self.image = pg.transform.rotate(self.image, 90)
                        self.rect = self.image.get_rect(center = [1450, 250])
                        self.position_computer = None
                    self.rect.center = tuple([center_cell[0] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_X_MAIN_BATTLE_COMPUTER_TURN, center_cell[1] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_Y])
                else:
                    self.rect.center = tuple([center_cell[0] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_X, center_cell[1] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_Y])
        # Being dragged: follow the mouse.
        elif selected_ship == "cruiser":
            self.rect.center = mouse_position
        # Not placed anywhere: snap back to the dock position.
        elif any(5 in row for row in player_grid) == False and selected_ship != "cruiser" and self.position == "horizontal" and game_battle_running == False:
            self.rect = self.image.get_rect(center = [1100, 135])
        elif any(5 in row for row in player_grid) == False and selected_ship != "cruiser" and self.position == "vertical" and game_battle_running == False:
            self.rect = self.image.get_rect(center = [1450, 250])
class AircraftCarrier(pg.sprite.Sprite):
    """Sprite for the aircraft carrier (grid value 4).

    Same placement / battle-display logic as the other ships, plus extra
    pixel offsets computed from fractions of the image's width/height --
    presumably because the carrier image is not centred in its bounding
    box; TODO confirm against the image asset.
    """
    def __init__(self):
        super().__init__()
        # Load the ship image from the Images folder; white is transparent.
        self.image = pg.image.load(path.join(path.join(path.dirname(__file__), "Images"), "aircraftcarrier.png")).convert()
        self.image.set_colorkey(WHITE)
        self.rect = self.image.get_rect()
        self.rect.center = [1060, 600]  # default dock position (horizontal)
        self.position = "horizontal"  # current on-screen orientation
        self.position_computer = None  # orientation used while drawn on the computer board
    def update(self, rotate_ship, selected_ship, mouse_position, player_grid, game_battle_running, computer_turn, missile_position_y, missile_on, computer_grid, computer_ships_sunk):
        """Per-frame placement / rotation / display logic for the carrier.

        player_grid / computer_grid appear to be 11x11 grids whose cells
        contain 4 where the carrier lies -- TODO confirm.
        """
        # Placement phase: rotation requested, or dropped outside the
        # area of the board where the carrier fits.
        if game_battle_running == False and computer_turn == False and (rotate_ship == "aircraftcarrier" or (self.position == "horizontal" and (TABLE_POSITION_X + 2 * CELL_SIZE > self.rect.center[0] or self.rect.center[0] > TABLE_POSITION_X - CELL_SIZE + TABLE_SIZE \
                or TABLE_POSITION_Y > self.rect.center[1] or self.rect.center[1] > TABLE_POSITION_Y - CELL_SIZE + TABLE_SIZE)) or (self.position == "vertical" and (TABLE_POSITION_X > self.rect.center[0] \
                or self.rect.center[0] > TABLE_POSITION_X - CELL_SIZE + TABLE_SIZE or TABLE_POSITION_Y + CELL_SIZE > self.rect.center[1] or self.rect.center[1] > TABLE_POSITION_Y - 2 * CELL_SIZE + TABLE_SIZE))):
            # Clear any previously recorded carrier cells from the player grid.
            for row_index in range(11):
                for column_index in range(11):
                    if player_grid[row_index][column_index] == 4:
                        player_grid[row_index][column_index] = 0
            # Toggle orientation and move back to the matching dock spot.
            if rotate_ship == "aircraftcarrier" and self.position == "horizontal":
                self.position = "vertical"
                self.image = pg.transform.rotate(self.image, 90)
                self.rect = self.image.get_rect(center = [1380, 520])
            elif rotate_ship == "aircraftcarrier" and self.position == "vertical":
                self.position = "horizontal"
                self.image = pg.transform.rotate(self.image, 270)
                self.rect = self.image.get_rect(center = [1060, 600])
        # Ship is placed on a grid and is not being dragged or rotated.
        elif selected_ship != "aircraftcarrier" and any(4 in row for row in player_grid) and rotate_ship != "aircraftcarrier":
            if game_battle_running and computer_turn == False:
                # Battle, player's turn: draw the carrier over the
                # computer's board (visible only once sunk).
                list_of_cells = []
                for row_index in range(11):
                    for column_index in range(11):
                        if computer_grid[row_index][column_index] == 4:
                            list_of_cells.append([column_index, row_index])
                center_cell = list_of_cells[1]
                if 4 not in computer_ships_sunk:
                    # Not sunk yet: keep the sprite shifted off-screen.
                    self.rect.center = tuple([center_cell[0] * CELL_SIZE + CELL_SIZE / 2 - self.image.get_width() * 1/8 + TABLE_POSITION_X_MAIN_BATTLE - SCREEN_WIDTH, center_cell[1] * CELL_SIZE + CELL_SIZE / 2 + self.image.get_height() * 1/4 + TABLE_POSITION_Y])
                elif center_cell[0] == list_of_cells[3][0] and center_cell[0] == list_of_cells[5][0]:
                    # Cells share a grid column: rotate once, then place
                    # using a different reference cell (swapped indices).
                    if self.position == "vertical" and self.position_computer == None:
                        self.image = pg.transform.rotate(self.image, 270)
                        self.rect = self.image.get_rect(center = [1060, 600])
                        self.position_computer = "horizontal"
                    center_cell = list_of_cells[4]
                    self.rect.center = tuple([center_cell[1] * CELL_SIZE + CELL_SIZE / 2 - self.image.get_width() * 1/8 + TABLE_POSITION_X_MAIN_BATTLE, center_cell[0] * CELL_SIZE + CELL_SIZE / 2 + self.image.get_height() * 1/4 + TABLE_POSITION_Y])
                else:
                    # Other alignment: rotate once the other way.
                    if self.position == "horizontal" and self.position_computer == None:
                        self.image = pg.transform.rotate(self.image, 90)
                        self.rect = self.image.get_rect(center = [1380, 520])
                        self.position_computer = "vertical"
                    self.rect.center = tuple([center_cell[1] * CELL_SIZE + CELL_SIZE / 2 + self.image.get_width() * 1/4 + TABLE_POSITION_X_MAIN_BATTLE, center_cell[0] * CELL_SIZE + CELL_SIZE / 2 + self.image.get_height() * 1/8 + TABLE_POSITION_Y])
            else:
                # Draw the carrier on the player's own board.
                list_of_cells = []
                for row_index in range(11):
                    for column_index in range(11):
                        if player_grid[row_index][column_index] == 4:
                            list_of_cells.append([column_index, row_index])
                center_cell = list_of_cells[2]
                if game_battle_running and self.position == "horizontal":
                    # Undo any rotation applied for the computer-board view.
                    if self.position_computer == "vertical":
                        self.image = pg.transform.rotate(self.image, 270)
                        self.rect = self.image.get_rect(center = [1060, 600])
                        self.position_computer = None
                    self.rect.center = tuple([center_cell[0] * CELL_SIZE + CELL_SIZE / 2 - self.image.get_width() * 1/8 + TABLE_POSITION_X_MAIN_BATTLE_COMPUTER_TURN, center_cell[1] * CELL_SIZE + CELL_SIZE / 2 + self.image.get_height() * 1/4 + TABLE_POSITION_Y])
                elif self.position == "horizontal":
                    self.rect.center = tuple([center_cell[0] * CELL_SIZE + CELL_SIZE / 2 - self.image.get_width() * 1/8 + TABLE_POSITION_X, center_cell[1] * CELL_SIZE + CELL_SIZE / 2 + self.image.get_height() * 1/4 + TABLE_POSITION_Y])
                elif game_battle_running and self.position == "vertical":
                    if self.position_computer == "horizontal":
                        self.image = pg.transform.rotate(self.image, 90)
                        self.rect = self.image.get_rect(center = [1380, 520])
                        self.position_computer = None
                    self.rect.center = tuple([center_cell[0] * CELL_SIZE + CELL_SIZE / 2 + self.image.get_width() * 1/4 + TABLE_POSITION_X_MAIN_BATTLE_COMPUTER_TURN, center_cell[1] * CELL_SIZE + CELL_SIZE / 2 + self.image.get_height() * 1/8 + TABLE_POSITION_Y])
                else:
                    self.rect.center = tuple([center_cell[0] * CELL_SIZE + CELL_SIZE / 2 + self.image.get_width() * 1/4 + TABLE_POSITION_X, center_cell[1] * CELL_SIZE + CELL_SIZE / 2 + self.image.get_height() * 1/8 + TABLE_POSITION_Y])
        # Being dragged: follow the mouse, offset by the image padding.
        elif selected_ship == "aircraftcarrier" and self.position == "horizontal":
            self.rect.center = tuple([mouse_position[0] - self.image.get_width() * 1/8, mouse_position[1] + self.image.get_height() * 1/4])
        elif selected_ship == "aircraftcarrier" and self.position == "vertical":
            self.rect.center = tuple([mouse_position[0] + self.image.get_width() * 1/4, mouse_position[1] + self.image.get_height() * 1/8])
        # Not placed anywhere: snap back to the dock position.
        elif any(4 in row for row in player_grid) == False and selected_ship != "aircraftcarrier" and self.position == "horizontal" and game_battle_running == False:
            self.rect = self.image.get_rect(center = [1060, 600])
        elif any(4 in row for row in player_grid) == False and selected_ship != "aircraftcarrier" and self.position == "vertical" and game_battle_running == False:
            self.rect = self.image.get_rect(center = [1380, 520])
class Destroyer(pg.sprite.Sprite):
    """Sprite for the destroyer (grid value 3).

    Uses fractional image-size offsets when positioning -- presumably the
    destroyer image is not centred in its bounding box; TODO confirm.
    """
    def __init__(self):
        super().__init__()
        # Load the ship image from the Images folder; white is transparent.
        self.image = pg.image.load(path.join(path.join(path.dirname(__file__), "Images"), "destroyer.png")).convert()
        self.image.set_colorkey(WHITE)
        self.rect = self.image.get_rect()
        self.rect.center = [1065, 365]  # default dock position (horizontal)
        self.position = "horizontal"  # current on-screen orientation
        self.position_computer = None  # orientation used while drawn on the computer board
    def update(self, rotate_ship, selected_ship, mouse_position, player_grid, game_battle_running, computer_turn, missile_position_y, missile_on, computer_grid, computer_ships_sunk):
        """Per-frame placement / rotation / display logic for the destroyer.

        player_grid / computer_grid appear to be 11x11 grids whose cells
        contain 3 where the destroyer lies -- TODO confirm.
        """
        # Placement phase: rotation requested, or dropped outside the
        # area of the board where the destroyer fits.
        if game_battle_running == False and computer_turn == False and (rotate_ship == "destroyer" or (self.position == "horizontal" and (TABLE_POSITION_X + 2 * CELL_SIZE > self.rect.center[0] or self.rect.center[0] > TABLE_POSITION_X - CELL_SIZE + TABLE_SIZE \
                or TABLE_POSITION_Y > self.rect.center[1] or self.rect.center[1] > TABLE_POSITION_Y + TABLE_SIZE)) or (self.position == "vertical" and (TABLE_POSITION_X > self.rect.center[0] \
                or self.rect.center[0] > TABLE_POSITION_X + TABLE_SIZE or TABLE_POSITION_Y + 2 * CELL_SIZE > self.rect.center[1] or self.rect.center[1] > TABLE_POSITION_Y - CELL_SIZE + TABLE_SIZE))):
            # Clear any previously recorded destroyer cells from the grid.
            for row_index in range(11):
                for column_index in range(11):
                    if player_grid[row_index][column_index] == 3:
                        player_grid[row_index][column_index] = 0
            # Toggle orientation and move back to the matching dock spot.
            if rotate_ship == "destroyer" and self.position == "horizontal":
                self.position = "vertical"
                self.image = pg.transform.rotate(self.image, 90)
                self.rect = self.image.get_rect(center = [1170, 350])
            elif rotate_ship == "destroyer" and self.position == "vertical":
                self.position = "horizontal"
                self.image = pg.transform.rotate(self.image, 270)
                self.rect = self.image.get_rect(center = [1065, 365])
        # Ship is placed on a grid and is not being dragged or rotated.
        elif selected_ship != "destroyer" and any(3 in row for row in player_grid) and rotate_ship != "destroyer":
            if game_battle_running and computer_turn == False:
                # Battle, player's turn: draw the destroyer over the
                # computer's board (visible only once sunk).
                list_of_cells = []
                for row_index in range(11):
                    for column_index in range(11):
                        if computer_grid[row_index][column_index] == 3:
                            list_of_cells.append([column_index, row_index])
                center_cell = list_of_cells[1]
                if 3 not in computer_ships_sunk:
                    # Not sunk yet: keep the sprite shifted off-screen.
                    self.rect.center = tuple([center_cell[0] * CELL_SIZE + CELL_SIZE / 2 - self.image.get_width() * 1/8 + TABLE_POSITION_X_MAIN_BATTLE - SCREEN_WIDTH, center_cell[1] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_Y])
                elif center_cell[0] == list_of_cells[3][0]:
                    # Cells share a grid column: rotate once, then place
                    # using a different reference cell (swapped indices).
                    if self.position == "vertical" and self.position_computer == None:
                        self.image = pg.transform.rotate(self.image, 270)
                        self.rect = self.image.get_rect(center = [1065, 365])
                        self.position_computer = "horizontal"
                    center_cell = list_of_cells[2]
                    self.rect.center = tuple([center_cell[1] * CELL_SIZE + CELL_SIZE / 2 - self.image.get_width() * 1/8 + TABLE_POSITION_X_MAIN_BATTLE, center_cell[0] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_Y])
                else:
                    # Other alignment: rotate once the other way.
                    if self.position == "horizontal" and self.position_computer == None:
                        self.image = pg.transform.rotate(self.image, 90)
                        self.rect = self.image.get_rect(center = [1170, 350])
                        self.position_computer = "vertical"
                    self.rect.center = tuple([center_cell[1] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_X_MAIN_BATTLE, center_cell[0] * CELL_SIZE + CELL_SIZE / 2 + self.image.get_height() * 1/8 + TABLE_POSITION_Y])
            else:
                # Draw the destroyer on the player's own board; the
                # reference cell depends on the orientation.
                list_of_cells = []
                for row_index in range(11):
                    for column_index in range(11):
                        if player_grid[row_index][column_index] == 3:
                            list_of_cells.append([column_index, row_index])
                if game_battle_running and self.position == "horizontal":
                    # Undo any rotation applied for the computer-board view.
                    if self.position_computer == "vertical":
                        self.image = pg.transform.rotate(self.image, 270)
                        self.rect = self.image.get_rect(center = [1065, 365])
                        self.position_computer = None
                    center_cell = list_of_cells[2]
                    self.rect.center = tuple([center_cell[0] * CELL_SIZE + CELL_SIZE / 2 - self.image.get_width() * 1/8 + TABLE_POSITION_X_MAIN_BATTLE_COMPUTER_TURN, center_cell[1] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_Y])
                elif self.position == "horizontal":
                    center_cell = list_of_cells[2]
                    self.rect.center = tuple([center_cell[0] * CELL_SIZE + CELL_SIZE / 2 - self.image.get_width() * 1/8 + TABLE_POSITION_X, center_cell[1] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_Y])
                elif game_battle_running and self.position == "vertical":
                    if self.position_computer == "horizontal":
                        self.image = pg.transform.rotate(self.image, 90)
                        self.rect = self.image.get_rect(center = [1170, 350])
                        self.position_computer = None
                    center_cell = list_of_cells[1]
                    self.rect.center = tuple([center_cell[0] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_X_MAIN_BATTLE_COMPUTER_TURN, center_cell[1] * CELL_SIZE + CELL_SIZE / 2 + self.image.get_height() * 1/8 + TABLE_POSITION_Y])
                else:
                    center_cell = list_of_cells[1]
                    self.rect.center = tuple([center_cell[0] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_X, center_cell[1] * CELL_SIZE + CELL_SIZE / 2 + self.image.get_height() * 1/8 + TABLE_POSITION_Y])
        # Being dragged: follow the mouse, offset by the image padding.
        elif selected_ship == "destroyer" and self.position == "horizontal":
            self.rect.center = tuple([mouse_position[0] - self.image.get_width() * 1/8, mouse_position[1]])
        elif selected_ship == "destroyer" and self.position == "vertical":
            self.rect.center = tuple([mouse_position[0], mouse_position[1] + self.image.get_height() * 1/8])
        # Not placed anywhere: snap back to the dock position.
        elif any(3 in row for row in player_grid) == False and selected_ship != "destroyer" and self.position == "horizontal" and game_battle_running == False:
            self.rect = self.image.get_rect(center = [1065, 365])
        elif any(3 in row for row in player_grid) == False and selected_ship != "destroyer" and self.position == "vertical" and game_battle_running == False:
            self.rect = self.image.get_rect(center = [1170, 350])
class Frigate(pg.sprite.Sprite):
    """Sprite for the frigate (grid value 2).

    Handles drag-and-drop placement and rotation before the battle, and
    repositioning over the player's / computer's board during the battle.
    """
    def __init__(self):
        super().__init__()
        # Load the ship image from the Images folder; white is transparent.
        self.image = pg.image.load(path.join(path.join(path.dirname(__file__), "Images"), "frigate.png")).convert()
        self.image.set_colorkey(WHITE)
        self.rect = self.image.get_rect()
        self.rect.center = [1030, 480]  # default dock position (horizontal)
        self.position = "horizontal"  # current on-screen orientation
        self.position_computer = None  # orientation used while drawn on the computer board
    def update(self, rotate_ship, selected_ship, mouse_position, player_grid, game_battle_running, computer_turn, missile_position_y, missile_on, computer_grid, computer_ships_sunk):
        """Per-frame placement / rotation / display logic for the frigate.

        player_grid / computer_grid appear to be 11x11 grids whose cells
        contain 2 where the frigate lies -- TODO confirm.
        """
        # Placement phase: rotation requested, or dropped outside the
        # area of the board where the frigate fits.
        if game_battle_running == False and computer_turn == False and (rotate_ship == "frigate" or (self.position == "horizontal" and (TABLE_POSITION_X + CELL_SIZE > self.rect.center[0] or self.rect.center[0] > TABLE_POSITION_X - CELL_SIZE + TABLE_SIZE \
                or TABLE_POSITION_Y > self.rect.center[1] or self.rect.center[1] > TABLE_POSITION_Y + TABLE_SIZE)) or (self.position == "vertical" and (TABLE_POSITION_X > self.rect.center[0] \
                or self.rect.center[0] > TABLE_POSITION_X + TABLE_SIZE or TABLE_POSITION_Y + CELL_SIZE > self.rect.center[1] or self.rect.center[1] > TABLE_POSITION_Y - CELL_SIZE + TABLE_SIZE))):
            # Clear any previously recorded frigate cells from the grid.
            for row_index in range(11):
                for column_index in range(11):
                    if player_grid[row_index][column_index] == 2:
                        player_grid[row_index][column_index] = 0
            # Toggle orientation and move back to the matching dock spot.
            if rotate_ship == "frigate" and self.position == "horizontal":
                self.position = "vertical"
                self.image = pg.transform.rotate(self.image, 90)
                self.rect = self.image.get_rect(center = [1260, 480])
            elif rotate_ship == "frigate" and self.position == "vertical":
                self.position = "horizontal"
                self.image = pg.transform.rotate(self.image, 270)
                self.rect = self.image.get_rect(center = [1030, 480])
        # Ship is placed on a grid and is not being dragged or rotated.
        elif selected_ship != "frigate" and any(2 in row for row in player_grid) and rotate_ship != "frigate":
            if game_battle_running and computer_turn == False:
                # Battle, player's turn: draw the frigate over the
                # computer's board (visible only once sunk).
                list_of_cells = []
                for row_index in range(11):
                    for column_index in range(11):
                        if computer_grid[row_index][column_index] == 2:
                            list_of_cells.append([column_index, row_index])
                center_cell = list_of_cells[1]
                if 2 not in computer_ships_sunk:
                    # Not sunk yet: keep the sprite shifted off-screen.
                    self.rect.center = tuple([center_cell[0] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_X_MAIN_BATTLE - SCREEN_WIDTH, center_cell[1] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_Y])
                elif center_cell[0] == list_of_cells[2][0]:
                    # Cells share a grid column: rotate once, then place
                    # (note swapped cell indices).
                    if self.position == "vertical" and self.position_computer == None:
                        self.image = pg.transform.rotate(self.image, 270)
                        self.rect = self.image.get_rect(center = [1030, 480])
                        self.position_computer = "horizontal"
                    self.rect.center = tuple([center_cell[1] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_X_MAIN_BATTLE, center_cell[0] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_Y])
                else:
                    # Other alignment: rotate once the other way.
                    if self.position == "horizontal" and self.position_computer == None:
                        self.image = pg.transform.rotate(self.image, 90)
                        self.rect = self.image.get_rect(center = [1260, 480])
                        self.position_computer = "vertical"
                    self.rect.center = tuple([center_cell[1] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_X_MAIN_BATTLE, center_cell[0] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_Y])
            else:
                # Draw the frigate on the player's own board.
                list_of_cells = []
                for row_index in range(11):
                    for column_index in range(11):
                        if player_grid[row_index][column_index] == 2:
                            list_of_cells.append([column_index, row_index])
                center_cell = list_of_cells[1]
                if game_battle_running:
                    # Undo any rotation applied for the computer-board view.
                    if self.position == "horizontal" and self.position_computer == "vertical":
                        self.image = pg.transform.rotate(self.image, 270)
                        self.rect = self.image.get_rect(center = [1030, 480])
                        self.position_computer = None
                    elif self.position == "vertical" and self.position_computer == "horizontal":
                        self.image = pg.transform.rotate(self.image, 90)
                        self.rect = self.image.get_rect(center = [1260, 480])
                        self.position_computer = None
                    self.rect.center = tuple([center_cell[0] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_X_MAIN_BATTLE_COMPUTER_TURN, center_cell[1] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_Y])
                else:
                    self.rect.center = tuple([center_cell[0] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_X, center_cell[1] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_Y])
        # Being dragged: follow the mouse.
        elif selected_ship == "frigate":
            self.rect.center = mouse_position
        # Not placed anywhere: snap back to the dock position.
        elif any(2 in row for row in player_grid) == False and selected_ship != "frigate" and self.position == "horizontal" and game_battle_running == False:
            self.rect = self.image.get_rect(center = [1030, 480])
        elif any(2 in row for row in player_grid) == False and selected_ship != "frigate" and self.position == "vertical" and game_battle_running == False:
            self.rect = self.image.get_rect(center = [1260, 480])
class Submarine(pg.sprite.Sprite):
    """Sprite for the submarine (grid value 1).

    Handles drag-and-drop placement and rotation before the battle, and
    repositioning over the player's / computer's board during the battle.
    """
    def __init__(self):
        super().__init__()
        # Load the ship image from the Images folder; white is transparent.
        self.image = pg.image.load(path.join(path.join(path.dirname(__file__), "Images"), "submarine.png")).convert()
        self.image.set_colorkey(WHITE)
        self.rect = self.image.get_rect()
        self.rect.center = [1015, 250]  # default dock position (horizontal)
        self.position = "horizontal"  # current on-screen orientation
        self.position_computer = None  # orientation used while drawn on the computer board
    def update(self, rotate_ship, selected_ship, mouse_position, player_grid, game_battle_running, computer_turn, missile_position_y, missile_on, computer_grid, computer_ships_sunk):
        """Per-frame placement / rotation / display logic for the submarine.

        player_grid / computer_grid appear to be 11x11 grids whose cells
        contain 1 where the submarine lies -- TODO confirm.
        """
        # Placement phase: rotation requested, or dropped outside the
        # area of the board where the submarine fits.
        if game_battle_running == False and computer_turn == False and (rotate_ship == "submarine" or (self.position == "horizontal" and (TABLE_POSITION_X + CELL_SIZE > self.rect.center[0] or self.rect.center[0] > TABLE_POSITION_X - CELL_SIZE + TABLE_SIZE \
                or TABLE_POSITION_Y > self.rect.center[1] or self.rect.center[1] > TABLE_POSITION_Y + TABLE_SIZE)) or (self.position == "vertical" and (TABLE_POSITION_X > self.rect.center[0] \
                or self.rect.center[0] > TABLE_POSITION_X + TABLE_SIZE or TABLE_POSITION_Y + CELL_SIZE > self.rect.center[1] or self.rect.center[1] > TABLE_POSITION_Y - CELL_SIZE + TABLE_SIZE))):
            # Clear any previously recorded submarine cells from the grid.
            for row_index in range(11):
                for column_index in range(11):
                    if player_grid[row_index][column_index] == 1:
                        player_grid[row_index][column_index] = 0
            # Toggle orientation and move back to the matching dock spot.
            if rotate_ship == "submarine" and self.position == "horizontal":
                self.position = "vertical"
                self.image = pg.transform.rotate(self.image, 90)
                self.rect = self.image.get_rect(center = [1300, 250])
            elif rotate_ship == "submarine" and self.position == "vertical":
                self.position = "horizontal"
                self.image = pg.transform.rotate(self.image, 270)
                self.rect = self.image.get_rect(center = [1015, 250])
        # Ship is placed on a grid and is not being dragged or rotated.
        elif selected_ship != "submarine" and any(1 in row for row in player_grid) and rotate_ship != "submarine":
            if game_battle_running and computer_turn == False:
                # Battle, player's turn: draw the submarine over the
                # computer's board (visible only once sunk).
                list_of_cells = []
                for row_index in range(11):
                    for column_index in range(11):
                        if computer_grid[row_index][column_index] == 1:
                            list_of_cells.append([column_index, row_index])
                center_cell = list_of_cells[1]
                if 1 not in computer_ships_sunk:
                    # Not sunk yet: keep the sprite shifted off-screen.
                    self.rect.center = tuple([center_cell[0] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_X_MAIN_BATTLE - SCREEN_WIDTH, center_cell[1] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_Y])
                elif center_cell[0] == list_of_cells[2][0]:
                    # Cells share a grid column: rotate once, then place
                    # (note swapped cell indices).
                    if self.position == "vertical" and self.position_computer == None:
                        self.image = pg.transform.rotate(self.image, 270)
                        self.rect = self.image.get_rect(center = [1015, 250])
                        self.position_computer = "horizontal"
                    self.rect.center = tuple([center_cell[1] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_X_MAIN_BATTLE, center_cell[0] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_Y])
                else:
                    # Other alignment: rotate once the other way.
                    if self.position == "horizontal" and self.position_computer == None:
                        self.image = pg.transform.rotate(self.image, 90)
                        self.rect = self.image.get_rect(center = [1300, 250])
                        self.position_computer = "vertical"
                    self.rect.center = tuple([center_cell[1] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_X_MAIN_BATTLE, center_cell[0] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_Y])
            else:
                # Draw the submarine on the player's own board.
                list_of_cells = []
                for row_index in range(11):
                    for column_index in range(11):
                        if player_grid[row_index][column_index] == 1:
                            list_of_cells.append([column_index, row_index])
                center_cell = list_of_cells[1]
                if game_battle_running:
                    # Undo any rotation applied for the computer-board view.
                    if self.position == "horizontal" and self.position_computer == "vertical":
                        self.image = pg.transform.rotate(self.image, 270)
                        self.rect = self.image.get_rect(center = [1015, 250])
                        self.position_computer = None
                    elif self.position == "vertical" and self.position_computer == "horizontal":
                        self.image = pg.transform.rotate(self.image, 90)
                        self.rect = self.image.get_rect(center = [1300, 250])
                        self.position_computer = None
                    self.rect.center = tuple([center_cell[0] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_X_MAIN_BATTLE_COMPUTER_TURN, center_cell[1] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_Y])
                else:
                    self.rect.center = tuple([center_cell[0] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_X, center_cell[1] * CELL_SIZE + CELL_SIZE / 2 + TABLE_POSITION_Y])
        # Being dragged: follow the mouse.
        elif selected_ship == "submarine":
            self.rect.center = mouse_position
        # Not placed anywhere: snap back to the dock position.
        elif any(1 in row for row in player_grid) == False and selected_ship != "submarine" and self.position == "horizontal" and game_battle_running == False:
            self.rect = self.image.get_rect(center = [1015, 250])
        elif any(1 in row for row in player_grid) == False and selected_ship != "submarine" and self.position == "vertical" and game_battle_running == False:
            self.rect = self.image.get_rect(center = [1300, 250])
class RotationSign(pg.sprite.Sprite):
    """Rotation icon shown next to each docked ship during placement.

    One sign is created per ship row (index *i*); the signs disappear as
    soon as the battle phase starts.
    """

    def __init__(self, i):
        super().__init__()
        images_dir = path.join(path.dirname(__file__), "Images")
        self.image = pg.image.load(path.join(images_dir, "rotation.png")).convert()
        self.image.set_colorkey(WHITE)
        self.rect = self.image.get_rect()
        # Signs are stacked vertically, 115 px apart.
        self.rect.center = [850, 135 + i * 115]

    def update(self, rotate_ship, selected_ship, mouse_position, player_grid, game_battle_running, computer_turn, missile_position_y, missile_on, computer_grid, computer_ships_sunk):
        """Remove the sign once the battle phase has started."""
        if not game_battle_running:
            return
        self.kill()
class Missile(pg.sprite.Sprite):
    """Projectile sprite animated across the board during the battle phase."""

    def __init__(self):
        super().__init__()
        image_path = path.join(path.join(path.dirname(__file__), "Images"), "missile.png")
        self.image = pg.image.load(image_path).convert_alpha()
        self.rect = self.image.get_rect()
        self.missile_position = "horizontal"
        self.missile_speed_x = 10
        self.missile_position_x = 0

    def update(self, rotate_ship, selected_ship, mouse_position, player_grid,
               game_battle_running, computer_turn, missile_position_y,
               missile_on, computer_grid, computer_ships_sunk):
        # The missile only exists while a battle is in progress.
        if not game_battle_running:
            self.kill()
            return
        if not missile_on:
            # Idle: park the sprite off-screen and make sure it faces the
            # correct direction for whoever fires next.
            if computer_turn:
                self.missile_position_x = 2000
                if self.missile_position == "horizontal":
                    self.image = pg.transform.rotate(self.image, 180)
                    self.missile_position = "vertical"
            else:
                self.missile_position_x = -500
                if self.missile_position == "vertical":
                    self.image = pg.transform.rotate(self.image, 180)
                    self.missile_position = "horizontal"
            self.rect.center = [-100, -100]
        else:
            # In flight: left-to-right on the player's turn,
            # right-to-left on the computer's.
            step = -self.missile_speed_x if computer_turn else self.missile_speed_x
            self.missile_position_x += step
            self.rect.center = [self.missile_position_x, missile_position_y]
| 74.200893
| 267
| 0.612027
| 4,295
| 33,242
| 4.461467
| 0.028405
| 0.070922
| 0.062102
| 0.048429
| 0.970254
| 0.961799
| 0.952771
| 0.93675
| 0.933149
| 0.926469
| 0
| 0.032568
| 0.279526
| 33,242
| 447
| 268
| 74.36689
| 0.767516
| 0
| 0
| 0.727924
| 0
| 0
| 0.042236
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033413
| false
| 0
| 0.00716
| 0
| 0.057279
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
873c3d72dea399db0f240034b1bf5bb4af7c709f
| 6,883
|
py
|
Python
|
2020/solutions/day17.py
|
rsizem2/aoc_2020
|
aa2dbf72a4c44930755bd9cc132ad7854f742f09
|
[
"MIT"
] | null | null | null |
2020/solutions/day17.py
|
rsizem2/aoc_2020
|
aa2dbf72a4c44930755bd9cc132ad7854f742f09
|
[
"MIT"
] | null | null | null |
2020/solutions/day17.py
|
rsizem2/aoc_2020
|
aa2dbf72a4c44930755bd9cc132ad7854f742f09
|
[
"MIT"
] | null | null | null |
from itertools import product
from collections import namedtuple
# Inclusive [min, max] bounds of the active region along one axis.
Range = namedtuple("Range", ["min", "max"])
def read_file3d():
    """Parse the puzzle input into an initial 3D Conway-cubes state.

    Returns:
        (cubes, dims): ``cubes`` is a set of (x, y, z) coordinates of
        active cells, and ``dims`` is a tuple of inclusive ``Range``
        bounds per axis.  The initial grid is a single z=0 plane.

    Assumes the input file is a non-empty rectangular grid of '#'/'.'.
    """
    with open('../input/day17_input.txt') as file:
        grid = [list(line.strip()) for line in file]
    cubes = {
        (x, y, 0)
        for y, row in enumerate(grid)
        for x, cube in enumerate(row)
        if cube == "#"
    }
    # Derive the bounds from the grid size rather than relying on the
    # leftover loop variables, which is fragile and order-dependent.
    x_range = Range(0, len(grid[-1]) - 1)
    y_range = Range(0, len(grid) - 1)
    z_range = Range(0, 0)
    return cubes, (x_range, y_range, z_range)
def read_file4d():
    """Parse the puzzle input into an initial 4D Conway-cubes state.

    Returns:
        (cubes, dims): ``cubes`` is a set of (x, y, z, w) coordinates of
        active cells, and ``dims`` is a tuple of inclusive ``Range``
        bounds per axis.  The initial grid is a single z=0, w=0 plane.

    Assumes the input file is a non-empty rectangular grid of '#'/'.'.
    """
    with open('../input/day17_input.txt') as file:
        grid = [list(line.strip()) for line in file]
    cubes = {
        (x, y, 0, 0)
        for y, row in enumerate(grid)
        for x, cube in enumerate(row)
        if cube == "#"
    }
    # Derive the bounds from the grid size rather than relying on the
    # leftover loop variables, which is fragile and order-dependent.
    x_range = Range(0, len(grid[-1]) - 1)
    y_range = Range(0, len(grid) - 1)
    return cubes, (x_range, y_range, Range(0, 0), Range(0, 0))
def neighbors3d(x, y, z):
    """Return the 26 cells adjacent to (x, y, z), excluding the cell itself."""
    result = []
    for dx, dy, dz in product([-1, 0, 1], repeat=3):
        if (dx, dy, dz) != (0, 0, 0):
            result.append((x + dx, y + dy, z + dz))
    return result
def neighbors4d(x, y, z, w):
    """Return the 80 cells adjacent to (x, y, z, w), excluding the cell itself."""
    offsets = product([-1, 0, 1], repeat=4)
    return [
        (x + a, y + b, z + c, w + d)
        for (a, b, c, d) in offsets
        if any((a, b, c, d))
    ]
def iterate3d(cubes, dims):
    """Advance the 3D Conway-cubes simulation by one cycle.

    Args:
        cubes: set of active (x, y, z) cells.
        dims: per-axis inclusive ``Range`` bounds of the active region,
            or (None, None, None) when nothing is active.

    Returns:
        (new_cubes, new_dims) after one application of the rules: an
        active cell survives with 2 or 3 active neighbours; an inactive
        cell activates with exactly 3.

    The original version tracked the new bounds incrementally with two
    large duplicated if/elif ladders; computing min/max over the result
    set afterwards is equivalent and far shorter.
    """
    old_x, old_y, old_z = dims
    if old_x is None:
        return set(), (None, None, None)
    new = set()
    # Scan one cell beyond the old bounds in every direction, since the
    # active region can grow by at most one cell per cycle.
    for x in range(old_x.min - 1, old_x.max + 2):
        for y in range(old_y.min - 1, old_y.max + 2):
            for z in range(old_z.min - 1, old_z.max + 2):
                active = sum(1 for n in neighbors3d(x, y, z) if n in cubes)
                # Survives with 2-3 neighbours, is born with exactly 3.
                if active == 3 or (active == 2 and (x, y, z) in cubes):
                    new.add((x, y, z))
    if not new:
        return new, (None, None, None)
    new_dims = tuple(
        Range(min(c[axis] for c in new), max(c[axis] for c in new))
        for axis in range(3)
    )
    return new, new_dims
def iterate4d(cubes, dims):
    """Advance the 4D Conway-cubes simulation by one cycle.

    Args:
        cubes: set of active (x, y, z, w) cells.
        dims: per-axis inclusive ``Range`` bounds of the active region,
            or (None, None, None, None) when nothing is active.

    Returns:
        (new_cubes, new_dims) after one application of the rules: an
        active cell survives with 2 or 3 active neighbours; an inactive
        cell activates with exactly 3.

    The original version tracked the new bounds incrementally with two
    large duplicated if/elif ladders; computing min/max over the result
    set afterwards is equivalent and far shorter.
    """
    old_x, old_y, old_z, old_w = dims
    if old_x is None:
        return set(), (None, None, None, None)
    new = set()
    # Scan one cell beyond the old bounds in every direction, since the
    # active region can grow by at most one cell per cycle.
    for x in range(old_x.min - 1, old_x.max + 2):
        for y in range(old_y.min - 1, old_y.max + 2):
            for z in range(old_z.min - 1, old_z.max + 2):
                for w in range(old_w.min - 1, old_w.max + 2):
                    active = sum(1 for n in neighbors4d(x, y, z, w) if n in cubes)
                    # Survives with 2-3 neighbours, is born with exactly 3.
                    if active == 3 or (active == 2 and (x, y, z, w) in cubes):
                        new.add((x, y, z, w))
    if not new:
        return new, (None, None, None, None)
    new_dims = tuple(
        Range(min(c[axis] for c in new), max(c[axis] for c in new))
        for axis in range(4)
    )
    return new, new_dims
def puzzle1():
    """Solve part 1: run six 3D cycles and print the active-cube count."""
    cubes, dims = read_file3d()
    # The loop index is unused; the puzzle always runs exactly six cycles.
    for _ in range(6):
        cubes, dims = iterate3d(cubes, dims)
    print(len(cubes))
def puzzle2():
    """Solve part 2: run six 4D cycles and print the active-cube count."""
    cubes, dims = read_file4d()
    # The loop index is unused; the puzzle always runs exactly six cycles.
    for _ in range(6):
        cubes, dims = iterate4d(cubes, dims)
    print(len(cubes))
# Run both parts only when executed as a script, so importing this module
# (e.g. from tests) does not trigger file I/O and printing.
if __name__ == "__main__":
    puzzle1()
    puzzle2()
| 37.612022
| 103
| 0.401279
| 992
| 6,883
| 2.628024
| 0.068548
| 0.055236
| 0.01611
| 0.030687
| 0.863828
| 0.835443
| 0.821634
| 0.788646
| 0.751438
| 0.751438
| 0
| 0.019813
| 0.486706
| 6,883
| 183
| 104
| 37.612022
| 0.718087
| 0
| 0
| 0.763636
| 0
| 0
| 0.008861
| 0.006973
| 0
| 0
| 0
| 0
| 0
| 1
| 0.048485
| false
| 0
| 0.012121
| 0.012121
| 0.109091
| 0.012121
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
874e35bfb6db89fe504bb7539af86c18c85f51f0
| 1,394
|
py
|
Python
|
libStash/books/migrations/0012_auto_20201027_2053.py
|
Dev-Rem/libStash
|
a364e9997c1c91b09f5db8a004deb4df305fa8cf
|
[
"MIT"
] | null | null | null |
libStash/books/migrations/0012_auto_20201027_2053.py
|
Dev-Rem/libStash
|
a364e9997c1c91b09f5db8a004deb4df305fa8cf
|
[
"MIT"
] | null | null | null |
libStash/books/migrations/0012_auto_20201027_2053.py
|
Dev-Rem/libStash
|
a364e9997c1c91b09f5db8a004deb4df305fa8cf
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.1 on 2020-10-27 20:53
import uuid
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a non-editable, indexed ``unique_id`` UUID field to each model."""

    dependencies = [
        ('books', '0011_auto_20201026_1105'),
    ]

    # Every model receives an identical field definition, so the operation
    # list is generated from the model names instead of being repeated
    # six times by hand.
    operations = [
        migrations.AddField(
            model_name=model,
            name='unique_id',
            field=models.UUIDField(db_index=True, default=uuid.uuid4, editable=False),
        )
        for model in (
            'author',
            'book',
            'image',
            'publisher',
            'warehouse',
            'warehousebook',
        )
    ]
| 30.304348
| 86
| 0.587518
| 145
| 1,394
| 5.503448
| 0.310345
| 0.135338
| 0.172932
| 0.203008
| 0.710526
| 0.710526
| 0.710526
| 0.710526
| 0.710526
| 0.710526
| 0
| 0.03764
| 0.294835
| 1,394
| 45
| 87
| 30.977778
| 0.774161
| 0.032281
| 0
| 0.631579
| 1
| 0
| 0.095026
| 0.017075
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.052632
| 0
| 0.131579
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5e58512f5669ce2c0180389c3f480d839ba80dd0
| 10,550
|
py
|
Python
|
tests/input/TestTcp.py
|
dstore-dbap/LumberMill
|
b7cbadc209a83386871735b8ad88b61da917a6ab
|
[
"Apache-2.0"
] | 15
|
2015-12-14T19:07:28.000Z
|
2022-02-28T13:32:11.000Z
|
tests/input/TestTcp.py
|
dstore-dbap/LumberMill
|
b7cbadc209a83386871735b8ad88b61da917a6ab
|
[
"Apache-2.0"
] | null | null | null |
tests/input/TestTcp.py
|
dstore-dbap/LumberMill
|
b7cbadc209a83386871735b8ad88b61da917a6ab
|
[
"Apache-2.0"
] | 4
|
2017-02-08T10:49:55.000Z
|
2019-03-19T18:47:46.000Z
|
import sys
import time
import mock
import socket
import ssl
import lumbermill.utils.DictUtils as DictUtils
from lumbermill.input import Tcp
from lumbermill.constants import LUMBERMILL_BASEPATH
from tests.ModuleBaseTestCase import ModuleBaseTestCase
from tests.ServiceDiscovery import getFreeTcpPortoOnLocalhost
class TestTcp(ModuleBaseTestCase):
    """Integration tests for the Tcp input module.

    Each test configures the module on a free local port, starts the
    Tornado IO loop, pushes payloads at it over a raw socket and asserts
    on the events collected by the receiver.  The ``time.sleep`` calls
    give the forked server / event loop time to start and drain.
    """

    def setUp(self):
        # Wrap the Tcp module around a mocked LumberMill core.
        super(TestTcp, self).setUp(Tcp.Tcp(mock.Mock()))

    def TestATcpConnection(self):
        # NOTE(review): the name starts with an uppercase "T", so the
        # unittest/pytest collectors will NOT run this method - confirm
        # whether this test is intentionally disabled.
        ipaddr, port = getFreeTcpPortoOnLocalhost()
        self.test_object.configure({'interface': ipaddr,
                                    'port': port,
                                    'simple_separator': '\n'})
        self.checkConfiguration()
        self.test_object.initAfterFork()
        self.startTornadoEventLoop()
        # Give server process time to startup.
        time.sleep(.1)
        try:
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            s.settimeout(1)
            s.connect((ipaddr, self.test_object.getConfigurationValue('port')))
            # 1500 newline-terminated messages -> 1500 expected events.
            for _ in range(0, 1500):
                s.sendall(b"Beethoven, Mozart, Chopin, Liszt, Brahms, Panties...I'm sorry...Schumann, Schubert, Mendelssohn and Bach. Names that will live for ever.\n")
            s.shutdown(socket.SHUT_RDWR)
            s.close()
            connection_succeeded = True
        except:
            # NOTE(review): bare except also swallows KeyboardInterrupt and
            # SystemExit; `except Exception:` would be safer.
            etype, evalue, etb = sys.exc_info()
            print("Could not connect to %s:%s. Exception: %s, Error: %s" % ('localhost', self.test_object.getConfigurationValue("port"), etype, evalue))
            connection_succeeded = False
        self.assertTrue(connection_succeeded)
        expected_ret_val = DictUtils.getDefaultEventDict({'data': b"Beethoven, Mozart, Chopin, Liszt, Brahms, Panties...I'm sorry...Schumann, Schubert, Mendelssohn and Bach. Names that will live for ever."})
        expected_ret_val.pop('lumbermill')
        event = False
        # Allow the event loop time to flush everything it received.
        time.sleep(2)
        counter = 0
        for event in self.receiver.getEvent():
            counter += 1
        self.assertTrue(event is not False)
        self.assertEqual(counter, 1500)
        # Drop the internal bookkeeping key before comparing payloads;
        # only the last received event is compared in detail.
        event.pop('lumbermill')
        self.assertDictEqual(event, expected_ret_val)
        self.stopTornadoEventLoop()
        self.tearDown()

    def testATlsTcpConnection(self):
        """Same flow as the plain TCP test, but over a TLSv1-wrapped socket."""
        ipaddr, port = getFreeTcpPortoOnLocalhost()
        self.test_object.configure({'interface': ipaddr,
                                    'port': port,
                                    'tls': True,
                                    'key': LUMBERMILL_BASEPATH + '/../tests/test_data/gambolputty_ca.key',
                                    'cert': LUMBERMILL_BASEPATH + '/../tests/test_data/gambolputty_ca.crt',
                                    'timeout': 1})
        self.checkConfiguration()
        self.test_object.initAfterFork()
        self.startTornadoEventLoop()
        # Give server process time to startup.
        time.sleep(.1)
        try:
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            s.settimeout(1)
            # NOTE(review): ssl.wrap_socket is deprecated (removed in
            # Python 3.12); SSLContext.wrap_socket is the replacement.
            s = ssl.wrap_socket(s, ssl_version=ssl.PROTOCOL_TLSv1)
            s.connect((self.test_object.getConfigurationValue('interface'), self.test_object.getConfigurationValue('port')))
            for _ in range(0, 1500):
                s.sendall(b"Beethoven, Mozart, Chopin, Liszt, Brahms, Panties...I'm sorry...Schumann, Schubert, Mendelssohn and Bach. Names that will live for ever.\n")
            s.close()
            connection_succeeded = True
        except:
            # NOTE(review): bare except - see note in TestATcpConnection.
            etype, evalue, etb = sys.exc_info()
            print("Could not connect to %s:%s. Exception: %s, Error: %s" % ( self.test_object.getConfigurationValue("interface"),
                                                                             self.test_object.getConfigurationValue("port"), etype, evalue))
            connection_succeeded = False
        self.assertTrue(connection_succeeded)
        # NOTE(review): the expected payload here is a str while received
        # data is bytes; only event['data'] is compared below, so this
        # full-dict expected value is effectively unused - confirm.
        expected_ret_val = DictUtils.getDefaultEventDict({'data': "Beethoven, Mozart, Chopin, Liszt, Brahms, Panties...I'm sorry...Schumann, Schubert, Mendelssohn and Bach. Names that will live for ever."})
        expected_ret_val.pop('lumbermill')
        event = False
        time.sleep(2)
        counter = 0
        for event in self.receiver.getEvent():
            counter += 1
        self.assertTrue(event != False)
        self.assertEqual(counter, 1500)
        event.pop('lumbermill')
        self.assertEqual(event['data'], b"Beethoven, Mozart, Chopin, Liszt, Brahms, Panties...I'm sorry...Schumann, Schubert, Mendelssohn and Bach. Names that will live for ever.")
        self.stopTornadoEventLoop()
        #self.tearDown()

    def testLineModeRegexSeparator(self):
        """Events are split on a regex separator instead of a fixed string."""
        ipaddr, port = getFreeTcpPortoOnLocalhost()
        self.test_object.configure({'interface': ipaddr,
                                    'port': port,
                                    'regex_separator': 'C[oO]nfused?\s+ca+t'})
        self.checkConfiguration()
        self.test_object.initAfterFork()
        self.startTornadoEventLoop()
        # Give server process time to startup.
        time.sleep(.1)
        try:
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            s.settimeout(1)
            s.connect((self.test_object.getConfigurationValue('interface'), self.test_object.getConfigurationValue('port')))
            # Two payloads, each containing two separator matches
            # ("Confused cat" variants) -> four events expected.
            s.sendall(b"Beethoven, Mozart, Chopin, Liszt, Brahms, Panties...I'm sorry...Schumann, Schubert, Mendelssohn and Bach. Names that will live for ever.Confused catBeethoven, Mozart, Chopin, Liszt, Brahms, Panties...I'm sorry...Schumann, Schubert, Mendelssohn and Bach. Names that will live for ever.COnfuse cat")
            s.sendall(b"Beethoven, Mozart, Chopin, Liszt, Brahms, Panties...I'm sorry...Schumann, Schubert, Mendelssohn and Bach. Names that will live for ever.Confused caaatBeethoven, Mozart, Chopin, Liszt, Brahms, Panties...I'm sorry...Schumann, Schubert, Mendelssohn and Bach. Names that will live for ever.Confused cat")
            s.close()
            connection_succeeded = True
        except:
            # NOTE(review): bare except - see note in TestATcpConnection.
            etype, evalue, etb = sys.exc_info()
            print("Could not connect to %s:%s. Exception: %s, Error: %s" % ( 'localhost', self.test_object.getConfigurationValue("port"), etype, evalue))
            connection_succeeded = False
        self.assertTrue(connection_succeeded)
        event = None
        time.sleep(1)
        counter = 0
        for event in self.receiver.getEvent():
            counter += 1
        self.assertIsNotNone(event)
        self.assertEqual(counter, 4)
        # Only the last event is inspected; it ends with the separator text.
        self.assertEqual(event['data'], b"Beethoven, Mozart, Chopin, Liszt, Brahms, Panties...I'm sorry...Schumann, Schubert, Mendelssohn and Bach. Names that will live for ever.Confused cat")
        #self.tearDown()

    def testLineModeSimpleSeparator(self):
        """Events are split on a custom literal separator ('***')."""
        ipaddr, port = getFreeTcpPortoOnLocalhost()
        self.test_object.configure({'interface': ipaddr,
                                    'port': port,
                                    'simple_separator': '***'})
        self.checkConfiguration()
        self.test_object.initAfterFork()
        self.startTornadoEventLoop()
        # Give server process time to startup.
        time.sleep(.1)
        try:
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            s.settimeout(1)
            s.connect((self.test_object.getConfigurationValue('interface'), self.test_object.getConfigurationValue('port')))
            # Two payloads with two '***'-terminated messages each
            # -> four events expected.
            s.sendall(b"Beethoven, Mozart, Chopin, Liszt, Brahms, Panties...I'm sorry...Schumann, Schubert, Mendelssohn and Bach. Names that will live for ever.***Beethoven, Mozart, Chopin, Liszt, Brahms, Panties...I'm sorry...Schumann, Schubert, Mendelssohn and Bach. Names that will live for ever.***")
            s.sendall(b"Beethoven, Mozart, Chopin, Liszt, Brahms, Panties...I'm sorry...Schumann, Schubert, Mendelssohn and Bach. Names that will live for ever.***Beethoven, Mozart, Chopin, Liszt, Brahms, Panties...I'm sorry...Schumann, Schubert, Mendelssohn and Bach. Names that will live for ever.***")
            s.close()
            connection_succeeded = True
        except:
            # NOTE(review): bare except - see note in TestATcpConnection.
            etype, evalue, etb = sys.exc_info()
            print("Could not connect to %s:%s. Exception: %s, Error: %s" % ( 'localhost', self.test_object.getConfigurationValue("port"), etype, evalue))
            connection_succeeded = False
        self.assertTrue(connection_succeeded)
        event = None
        time.sleep(1)
        counter = 0
        for event in self.receiver.getEvent():
            counter += 1
        self.assertIsNotNone(event)
        self.assertEqual(counter, 4)
        self.assertEqual(event['data'], b"Beethoven, Mozart, Chopin, Liszt, Brahms, Panties...I'm sorry...Schumann, Schubert, Mendelssohn and Bach. Names that will live for ever.***")
        #self.tearDown()

    def testStreamMode(self):
        """In stream mode, data is emitted in fixed-size chunks (1024 bytes)."""
        ipaddr, port = getFreeTcpPortoOnLocalhost()
        self.test_object.configure({'interface': ipaddr,
                                    'port': port,
                                    'mode': 'stream',
                                    'chunksize': 1024})
        self.checkConfiguration()
        self.test_object.initAfterFork()
        self.startTornadoEventLoop()
        # Give server process time to startup.
        time.sleep(.1)
        try:
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            s.settimeout(1)
            s.connect((self.test_object.getConfigurationValue('interface'), self.test_object.getConfigurationValue('port')))
            for _ in range(0, 50):
                s.sendall(b"Beethoven, Mozart, Chopin, Liszt, Brahms, Panties...I'm sorry...Schumann, Schubert, Mendelssohn and Bach. Names that will live for ever.")
            s.close()
            connection_succeeded = True
        except:
            # NOTE(review): bare except - see note in TestATcpConnection.
            etype, evalue, etb = sys.exc_info()
            print("Could not connect to %s:%s. Exception: %s, Error: %s" % ('localhost', self.test_object.getConfigurationValue("port"), etype, evalue))
            connection_succeeded = False
        self.assertTrue(connection_succeeded)
        events = []
        time.sleep(1)
        for event in self.receiver.getEvent():
            events.append(event)
        # 50 messages of 139 bytes -> six full 1024-byte chunks
        # (presumably any trailing partial chunk is not emitted - confirm).
        self.assertEqual(len(events), 6)
        self.assertEqual(len(events[0]['data']), 1024)
        # #self.tearDown()

    def tearDown(self):
        # Shut the module down before the base class cleans up.
        self.test_object.shutDown()
        ModuleBaseTestCase.tearDown(self)
| 53.015075
| 326
| 0.616493
| 1,132
| 10,550
| 5.672261
| 0.135159
| 0.032394
| 0.056689
| 0.057312
| 0.86295
| 0.851425
| 0.846753
| 0.833048
| 0.833048
| 0.817162
| 0
| 0.007544
| 0.27128
| 10,550
| 199
| 327
| 53.015075
| 0.827653
| 0.023602
| 0
| 0.711111
| 0
| 0.066667
| 0.28695
| 0.007385
| 0
| 0
| 0
| 0
| 0.105556
| 1
| 0.038889
| false
| 0
| 0.055556
| 0
| 0.1
| 0.027778
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5e7d71fd11abe0c19991a0f7c80446de68ca3393
| 40,451
|
py
|
Python
|
test/test_markdown_fenced_code_blocks.py
|
scop/pymarkdown
|
562ba8f7857d99ba09e86e42de5a37ec6d9b2c30
|
[
"MIT"
] | null | null | null |
test/test_markdown_fenced_code_blocks.py
|
scop/pymarkdown
|
562ba8f7857d99ba09e86e42de5a37ec6d9b2c30
|
[
"MIT"
] | null | null | null |
test/test_markdown_fenced_code_blocks.py
|
scop/pymarkdown
|
562ba8f7857d99ba09e86e42de5a37ec6d9b2c30
|
[
"MIT"
] | null | null | null |
"""
https://github.github.com/gfm/#fenced-code-blocks
"""
import pytest
from .utils import act_and_assert
# pylint: disable=too-many-lines
@pytest.mark.gfm
def test_fenced_code_blocks_089():
"""
Test case 089: Simple example with backticks
"""
# Arrange
source_markdown = """```
<
>
```"""
expected_tokens = [
"[fcode-block(1,1):`:3::::::]",
"[text(2,1):\a<\a<\a\n \a>\a>\a:]",
"[end-fcode-block::3:False]",
]
expected_gfm = """<pre><code><
>
</code></pre>"""
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_090():
"""
Test case 090: Simple example with tildes
"""
# Arrange
source_markdown = """~~~
<
>
~~~"""
expected_tokens = [
"[fcode-block(1,1):~:3::::::]",
"[text(2,1):\a<\a<\a\n \a>\a>\a:]",
"[end-fcode-block::3:False]",
]
expected_gfm = """<pre><code><
>
</code></pre>"""
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_091():
"""
Test case 091: Fewer than three backticks is not enough:
"""
# Arrange
source_markdown = """``
foo
``"""
expected_tokens = [
"[para(1,1):\n\n]",
"[icode-span(1,1):foo:``:\a\n\a \a:\a\n\a \a]",
"[end-para:::True]",
]
expected_gfm = """<p><code>foo</code></p>"""
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_092():
"""
Test case 092: (part a) The closing code fence must use the same character as the opening fence:
"""
# Arrange
source_markdown = """```
aaa
~~~
```"""
expected_tokens = [
"[fcode-block(1,1):`:3::::::]",
"[text(2,1):aaa\n~~~:]",
"[end-fcode-block::3:False]",
]
expected_gfm = """<pre><code>aaa
~~~
</code></pre>"""
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_093():
"""
Test case 093: (part b) The closing code fence must use the same character as the opening fence:
"""
# Arrange
source_markdown = """~~~
aaa
```
~~~"""
expected_tokens = [
"[fcode-block(1,1):~:3::::::]",
"[text(2,1):aaa\n```:]",
"[end-fcode-block::3:False]",
]
expected_gfm = """<pre><code>aaa
```
</code></pre>"""
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_094():
"""
Test case 094: (part a) The closing code fence must be at least as long as the opening fence:
"""
# Arrange
source_markdown = """````
aaa
```
``````"""
expected_tokens = [
"[fcode-block(1,1):`:4::::::]",
"[text(2,1):aaa\n```:]",
"[end-fcode-block::6:False]",
]
expected_gfm = """<pre><code>aaa
```
</code></pre>"""
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_095():
"""
Test case 095: (part b) The closing code fence must be at least as long as the opening fence:
"""
# Arrange
source_markdown = """~~~~
aaa
~~~
~~~~"""
expected_tokens = [
"[fcode-block(1,1):~:4::::::]",
"[text(2,1):aaa\n~~~:]",
"[end-fcode-block::4:False]",
]
expected_gfm = """<pre><code>aaa
~~~
</code></pre>"""
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_096():
"""
Test case 096: (part a) Unclosed code blocks are closed by the end of the document (or the enclosing block quote or list item):
"""
# Arrange
source_markdown = """```"""
expected_tokens = ["[fcode-block(1,1):`:3::::::]", "[end-fcode-block:::True]"]
expected_gfm = """<pre><code></code></pre>"""
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_096a():
"""
Test case 096a: variation
"""
# Arrange
source_markdown = """```
"""
expected_tokens = [
"[fcode-block(1,1):`:3::::::]",
"[text(2,1)::]",
"[end-fcode-block:::True]",
]
expected_gfm = """<pre><code></code></pre>"""
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_096b():
"""
Test case 096b: variation
"""
# Arrange
source_markdown = """```
"""
expected_tokens = [
"[fcode-block(1,1):`:3::::::]",
"[text(2,1):\n\x03:]",
"[end-fcode-block:::True]",
]
expected_gfm = """<pre><code>\n</code></pre>"""
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_096c():
"""
Test case 096b: variation
"""
# Arrange
source_markdown = """```
"""
expected_tokens = [
"[fcode-block(1,1):`:3::::::]",
"[text(2,1):\n\x03\n\x03:]",
"[end-fcode-block:::True]",
]
expected_gfm = """<pre><code>
</code></pre>"""
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_096d():
"""
Test case 096b: variation
"""
# Arrange
source_markdown = """```
/a
/a/a
""".replace(
"/a", " "
)
expected_tokens = [
"[fcode-block(1,1):`:3::::::]",
"[text(2,2):\n\x03 \n\x03: ]",
"[end-fcode-block:::True]",
]
expected_gfm = """<pre><code>\a
\a\a
</code></pre>""".replace(
"\a", " "
)
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_096e():
"""
Test case 096b: variation
"""
# Arrange
source_markdown = """```
abc
/a/a
""".replace(
"/a", " "
)
expected_tokens = [
"[fcode-block(1,1):`:3::::::]",
"[text(2,1):abc\n\x03 \n\x03:]",
"[end-fcode-block:::True]",
]
expected_gfm = """<pre><code>abc
</code></pre>"""
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_096f():
"""
Test case 096b: variation
"""
# Arrange
source_markdown = """```
abc
"""
expected_tokens = [
"[fcode-block(1,1):`:3::::::]",
"[text(2,1):abc\n\x03\n\x03:]",
"[end-fcode-block:::True]",
]
expected_gfm = """<pre><code>abc
</code></pre>"""
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens, show_debug=True)
@pytest.mark.gfm
def test_fenced_code_blocks_097():
"""
Test case 097: (part b) Unclosed code blocks are closed by the end of the document (or the enclosing block quote or list item):
"""
# Arrange
source_markdown = """`````
```
aaa"""
expected_tokens = [
"[fcode-block(1,1):`:5::::::]",
"[text(2,1):\n```\naaa:]",
"[end-fcode-block:::True]",
]
expected_gfm = """<pre><code>
```
aaa
</code></pre>"""
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_098x():
"""
Test case 098: (part c) Unclosed code blocks are closed by the end of the document (or the enclosing block quote or list item):
"""
# Arrange
source_markdown = """> ```
> aaa
bbb"""
expected_tokens = [
"[block-quote(1,1)::> \n> \n]",
"[fcode-block(1,3):`:3::::::]",
"[text(2,3):aaa:]",
"[end-fcode-block:::True]",
"[end-block-quote:::True]",
"[BLANK(3,1):]",
"[para(4,1):]",
"[text(4,1):bbb:]",
"[end-para:::True]",
]
expected_gfm = """<blockquote>
<pre><code>aaa
</code></pre>
</blockquote>
<p>bbb</p>"""
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_098a():
"""
Test case 098a: Modified 98 without a space between the block quote indicator and the string.
"""
# Arrange
source_markdown = """> ```
>aaa
bbb"""
expected_tokens = [
"[block-quote(1,1)::> \n>\n]",
"[fcode-block(1,3):`:3::::::]",
"[text(2,2):aaa:]",
"[end-fcode-block:::True]",
"[end-block-quote:::True]",
"[BLANK(3,1):]",
"[para(4,1):]",
"[text(4,1):bbb:]",
"[end-para:::True]",
]
expected_gfm = """<blockquote>
<pre><code>aaa
</code></pre>
</blockquote>
<p>bbb</p>"""
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_098b():
"""
Test case 098b: Modified 98 with extra ">" before second line.
"""
# Arrange
source_markdown = """> ```
>> aaa
bbb"""
expected_tokens = [
"[block-quote(1,1)::> \n>\n]",
"[fcode-block(1,3):`:3::::::]",
"[text(2,2):\a>\a>\a aaa:]",
"[end-fcode-block:::True]",
"[end-block-quote:::True]",
"[BLANK(3,1):]",
"[para(4,1):]",
"[text(4,1):bbb:]",
"[end-para:::True]",
]
expected_gfm = """<blockquote>
<pre><code>> aaa
</code></pre>
</blockquote>
<p>bbb</p>"""
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_098c():
"""
Test case 098c: Modified 98 with less ">" before second line.
"""
# Arrange
source_markdown = """> ```
aaa
bbb"""
expected_tokens = [
"[block-quote(1,1)::> ]",
"[fcode-block(1,3):`:3::::::]",
"[end-fcode-block:::True]",
"[end-block-quote:::True]",
"[para(2,1):]",
"[text(2,1):aaa:]",
"[end-para:::True]",
"[BLANK(3,1):]",
"[para(4,1):]",
"[text(4,1):bbb:]",
"[end-para:::True]",
]
expected_gfm = """<blockquote>
<pre><code></code></pre>
</blockquote>
<p>aaa</p>
<p>bbb</p>"""
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_099x():
"""
Test case 099: A code block can have all empty lines as its content:
"""
# Arrange
source_markdown = """```
\a\a
```""".replace(
"\a", " "
)
expected_tokens = [
"[fcode-block(1,1):`:3::::::]",
"[text(2,1):\n\x03 :]",
"[end-fcode-block::3:False]",
]
expected_gfm = """<pre><code>
\a\a
</code></pre>""".replace(
"\a", " "
)
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_099a():
"""
Test case 099a: variation
"""
# Arrange
source_markdown = """```
\a\a
```""".replace(
"\a", " "
)
expected_tokens = [
"[fcode-block(1,1):`:3::::::]",
"[text(2,1):\n\x03 \n\x03:]",
"[end-fcode-block::3:False]",
]
expected_gfm = """<pre><code>
\a\a
</code></pre>""".replace(
"\a", " "
)
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_099b():
"""
Test case 099b: variation
"""
# Arrange
source_markdown = """```
\a
\a\a
\a
```""".replace(
"\a", " "
)
expected_tokens = [
"[fcode-block(1,1):`:3::::::]",
"[text(2,1):\n\x03 \n\x03 \n\x03 \n\x03:]",
"[end-fcode-block::3:False]",
]
expected_gfm = """<pre><code>
\a
\a\a
\a
</code></pre>""".replace(
"\a", " "
)
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_099c():
"""
Test case 099c: variation
"""
# Arrange
source_markdown = """```
z
\a
\a\a
\a
z
```""".replace(
"\a", " "
)
expected_tokens = [
"[fcode-block(1,1):`:3::::::]",
"[text(2,1):z\n\x03 \n\x03 \n\x03 \nz:]",
"[end-fcode-block::3:False]",
]
expected_gfm = """<pre><code>z
\a
\a\a
\a
z
</code></pre>""".replace(
"\a", " "
)
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_099d():
"""
Test case 099c: variation
"""
# Arrange
source_markdown = """```
z
\a
z
```""".replace(
"\a", " "
)
expected_tokens = [
"[fcode-block(1,1):`:3::::::]",
"[text(2,1):z\n\x03 \nz:]",
"[end-fcode-block::3:False]",
]
expected_gfm = """<pre><code>z
\a
z
</code></pre>""".replace(
"\a", " "
)
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_099e():
"""
Test case 099e: variation
"""
# Arrange
source_markdown = """```
\a
\a\a
\a
```""".replace(
"\a", " "
)
expected_tokens = [
"[fcode-block(1,1):`:3::::::]",
"[text(2,2):\n\x03 \n\x03 : ]",
"[end-fcode-block::3:False]",
]
expected_gfm = """<pre><code>\a
\a\a
\a
</code></pre>""".replace(
"\a", " "
)
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_099f():
"""
Test case 099f: variation
"""
# Arrange
source_markdown = """```
\a
\a\aabc
\a
```""".replace(
"\a", " "
)
expected_tokens = [
"[fcode-block(1,1):`:3::::::]",
"[text(2,1):\n\x03 \n abc\n\x03 \n\x03:]",
"[end-fcode-block::3:False]",
]
expected_gfm = """<pre><code>
\a
\a\aabc
\a
</code></pre>""".replace(
"\a", " "
)
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_099g():
"""
Test case 099c: variation
"""
# Arrange
source_markdown = """```
z
\a
\a\a
\a
z
```""".replace(
"\a", " "
)
expected_tokens = [
"[fcode-block(1,1):`:3::::::]",
"[text(2,1):z\n\x03\n\x03 \n\x03 \n\x03 \n\x03\nz:]",
"[end-fcode-block::3:False]",
]
expected_gfm = """<pre><code>z
\a
\a\a
\a
z
</code></pre>""".replace(
"\a", " "
)
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_099h():
    """
    Test case 099h: shorter backtick fence inside a 5-backtick fence,
    with a blank line after the opening fence.
    """
    # Arrange
    # NOTE(review): blank lines appear lost in this copy; the blank line
    # below is reconstructed from the expected tokens.
    source_markdown = """`````

```
aaa
`````"""
    expected_tokens = [
        "[fcode-block(1,1):`:5::::::]",
        "[text(2,1):\n```\naaa:]",
        "[end-fcode-block::5:False]",
    ]
    expected_gfm = """<pre><code>
```
aaa
</code></pre>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_099i():
    """
    Test case 099i: variation of 099h with two blank lines after the
    opening fence.
    """
    # Arrange
    # NOTE(review): blank lines appear lost in this copy; the blank lines
    # below are reconstructed from the expected tokens.
    source_markdown = """`````


```
aaa
`````"""
    expected_tokens = [
        "[fcode-block(1,1):`:5::::::]",
        "[text(2,1):\n\x03\n```\naaa:]",
        "[end-fcode-block::5:False]",
    ]
    expected_gfm = """<pre><code>

```
aaa
</code></pre>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_099j():
    """
    Test case 099j: variation of 099i with text lines separated by blank
    lines before the inner fence.
    """
    # Arrange
    # NOTE(review): blank lines appear lost in this copy; the blank lines
    # below are reconstructed from the expected tokens.
    source_markdown = """`````


bbb

ccc

```
aaa
`````"""
    expected_tokens = [
        "[fcode-block(1,1):`:5::::::]",
        "[text(2,1):\n\x03\nbbb\n\x03\nccc\n\x03\n```\naaa:]",
        "[end-fcode-block::5:False]",
    ]
    expected_gfm = """<pre><code>

bbb

ccc

```
aaa
</code></pre>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_099ja():
    """
    Test case 099ja: variation of 099j with a trailing newline after the
    closing fence (hence the BLANK token at line 11).
    """
    # Arrange
    # NOTE(review): blank lines appear lost in this copy; the blank lines
    # below are reconstructed from the expected tokens (BLANK(11,1)
    # requires a 10-line source before the trailing blank).
    source_markdown = """`````


bbb

ccc

```
aaa
`````
"""
    expected_tokens = [
        "[fcode-block(1,1):`:5::::::]",
        "[text(2,1):\n\x03\nbbb\n\x03\nccc\n\x03\n```\naaa:]",
        "[end-fcode-block::5:False]",
        "[BLANK(11,1):]",
    ]
    expected_gfm = """<pre><code>

bbb

ccc

```
aaa
</code></pre>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_099k():
    """
    Test case 099k: variation of 099f with three blank lines just inside
    each fence.
    """
    # Arrange
    # NOTE(review): blank lines and repeated spaces appear lost in this
    # copy; the literals below are reconstructed from the expected tokens.
    source_markdown = """```



\a
\a\aabc
\a



```""".replace(
        "\a", " "
    )
    expected_tokens = [
        "[fcode-block(1,1):`:3::::::]",
        "[text(2,1):\n\x03\n\x03\n\x03 \n  abc\n\x03 \n\x03\n\x03\n\x03:]",
        "[end-fcode-block::3:False]",
    ]
    expected_gfm = """<pre><code>


\a
\a\aabc
\a



</code></pre>""".replace(
        "\a", " "
    )

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_099l():
    """
    Test case 099l: variation of 099g with three blank lines around the
    space-only lines.
    """
    # Arrange
    # NOTE(review): blank lines and repeated spaces appear lost in this
    # copy; the literals below are reconstructed from the expected tokens.
    source_markdown = """```
z



\a
\a\a
\a



z
```""".replace(
        "\a", " "
    )
    expected_tokens = [
        "[fcode-block(1,1):`:3::::::]",
        "[text(2,1):z\n\x03\n\x03\n\x03\n\x03 \n\x03  \n\x03 \n\x03\n\x03\n\x03\nz:]",
        "[end-fcode-block::3:False]",
    ]
    expected_gfm = """<pre><code>z



\a
\a\a
\a



z
</code></pre>""".replace(
        "\a", " "
    )

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_100():
    """
    Test case 100: A code block can be empty (opening fence immediately
    followed by the closing fence):
    """
    # Arrange
    source_markdown = """```
```"""
    expected_tokens = ["[fcode-block(1,1):`:3::::::]", "[end-fcode-block::3:False]"]
    expected_gfm = """<pre><code></code></pre>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_101():
    """
    Test case 101: (part a) Fences can be indented. If the opening fence is indented, content lines will have equivalent opening indentation removed, if present:
    """
    # Arrange
    # NOTE(review): leading spaces appear lost in this copy; the
    # indentation below is reconstructed from the expected tokens
    # (fcode-block at column 2, one space removed from line 2).
    source_markdown = """ ```
 aaa
aaa
```"""
    expected_tokens = [
        "[fcode-block(1,2):`:3::::: :]",
        "[text(2,2):aaa\naaa:\a \a\x03\a]",
        "[end-fcode-block::3:False]",
    ]
    expected_gfm = """<pre><code>aaa
aaa
</code></pre>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_102():
    """
    Test case 102: (part b) Fences can be indented. If the opening fence is indented, content lines will have equivalent opening indentation removed, if present:
    """
    # Arrange
    # NOTE(review): leading/repeated spaces appear lost in this copy; the
    # indentation below is reconstructed from the expected tokens and the
    # matching GFM spec example - verify upstream.
    source_markdown = """  ```
aaa
  aaa
aaa
  ```"""
    expected_tokens = [
        "[fcode-block(1,3):`:3:::::  :]",
        "[text(2,1):aaa\n\a  \a\x03\aaaa\naaa:]",
        "[end-fcode-block:  :3:False]",
    ]
    expected_gfm = """<pre><code>aaa
aaa
aaa
</code></pre>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_103():
    """
    Test case 103: (part c) Fences can be indented. If the opening fence is indented, content lines will have equivalent opening indentation removed, if present:
    """
    # Arrange
    # NOTE(review): leading/repeated spaces appear lost in this copy; the
    # indentation below is reconstructed from the expected tokens and the
    # matching GFM spec example - verify upstream.
    source_markdown = """   ```
   aaa
    aaa
  aaa
   ```"""
    expected_tokens = [
        "[fcode-block(1,4):`:3:::::   :]",
        "[text(2,4):aaa\n\a   \a\x03\a aaa\n\a  \a\x03\aaaa:\a   \a\x03\a]",
        "[end-fcode-block:   :3:False]",
    ]
    expected_gfm = """<pre><code>aaa
 aaa
aaa
</code></pre>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_104():
    """
    Test case 104: Four spaces indentation produces an indented code block:
    """
    # Arrange
    # NOTE(review): leading/repeated spaces appear lost in this copy; the
    # four-space indentation below is reconstructed from the expected
    # icode-block token at column 5 - verify upstream.
    source_markdown = """    ```
    aaa
    ```"""
    expected_tokens = [
        "[icode-block(1,5):    :\n    \n    ]",
        "[text(1,5):```\naaa\n```:]",
        "[end-icode-block:::True]",
    ]
    expected_gfm = """<pre><code>```
aaa
```
</code></pre>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_105():
    """
    Test case 105: (part a) Closing fences may be indented by 0-3 spaces, and their indentation need not match that of the opening fence:
    """
    # Arrange
    # NOTE(review): leading/repeated spaces appear lost in this copy; the
    # two-space closing-fence indent is reconstructed from the matching
    # GFM spec example - verify upstream.
    source_markdown = """```
aaa
  ```"""
    expected_tokens = [
        "[fcode-block(1,1):`:3::::::]",
        "[text(2,1):aaa:]",
        "[end-fcode-block:  :3:False]",
    ]
    expected_gfm = """<pre><code>aaa
</code></pre>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_106():
    """
    Test case 106: (part b) Closing fences may be indented by 0-3 spaces, and their indentation need not match that of the opening fence:
    """
    # Arrange
    # NOTE(review): leading/repeated spaces appear lost in this copy; the
    # indentation below is reconstructed from the expected tokens
    # (opening fence at column 4) and the GFM spec example.
    source_markdown = """   ```
aaa
  ```"""
    expected_tokens = [
        "[fcode-block(1,4):`:3:::::   :]",
        "[text(2,1):aaa:]",
        "[end-fcode-block:  :3:False]",
    ]
    expected_gfm = """<pre><code>aaa
</code></pre>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_107():
    """
    Test case 107: This is not a closing fence, because it is indented 4 spaces:
    """
    # Arrange
    # NOTE(review): leading spaces appear lost in this copy; the
    # four-space indent on the would-be closing fence is reconstructed
    # from the docstring's stated scenario - verify upstream.
    source_markdown = """```
aaa
    ```"""
    expected_tokens = [
        "[fcode-block(1,1):`:3::::::]",
        "[text(2,1):aaa\n    ```:]",
        "[end-fcode-block:::True]",
    ]
    expected_gfm = """<pre><code>aaa
    ```
</code></pre>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_108():
    """
    Test case 108: (part a) Code fences (opening and closing) cannot contain internal spaces:
    (the line parses as an inline code span instead of a fence)
    """
    # Arrange
    source_markdown = """``` ```
aaa"""
    expected_tokens = [
        "[para(1,1):\n]",
        "[icode-span(1,1): :```::]",
        """[text(1,8):
aaa::\n]""",
        "[end-para:::True]",
    ]
    expected_gfm = """<p><code> </code>
aaa</p>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_109():
    """
    Test case 109: (part b) Code fences (opening and closing) cannot contain internal spaces:
    (the tilde line with an internal space stays inside the block)
    """
    # Arrange
    source_markdown = """~~~~~~
aaa
~~~ ~~"""
    expected_tokens = [
        "[fcode-block(1,1):~:6::::::]",
        "[text(2,1):aaa\n~~~ ~~:]",
        "[end-fcode-block:::True]",
    ]
    expected_gfm = """<pre><code>aaa
~~~ ~~
</code></pre>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_110():
    """
    Test case 110: Fenced code blocks can interrupt paragraphs, and can be followed directly by paragraphs, without a blank line between:
    """
    # Arrange
    source_markdown = """foo
```
bar
```
baz"""
    expected_tokens = [
        "[para(1,1):]",
        "[text(1,1):foo:]",
        "[end-para:::False]",
        "[fcode-block(2,1):`:3::::::]",
        "[text(3,1):bar:]",
        "[end-fcode-block::3:False]",
        "[para(5,1):]",
        "[text(5,1):baz:]",
        "[end-para:::True]",
    ]
    expected_gfm = """<p>foo</p>
<pre><code>bar
</code></pre>
<p>baz</p>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_111():
    """
    Test case 111: Other blocks can also occur before and after fenced code blocks without an intervening blank line:
    (setext heading before, ATX heading after)
    """
    # Arrange
    source_markdown = """foo
---
~~~
bar
~~~
# baz"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):foo:]",
        "[end-setext::]",
        "[fcode-block(3,1):~:3::::::]",
        "[text(4,1):bar:]",
        "[end-fcode-block::3:False]",
        "[atx(6,1):1:0:]",
        "[text(6,3):baz: ]",
        "[end-atx::]",
    ]
    expected_gfm = """<h2>foo</h2>
<pre><code>bar
</code></pre>
<h1>baz</h1>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_112():
    """
    Test case 112: (part a) An info string can be provided after the opening code fence.
    """
    # Arrange
    # NOTE(review): repeated spaces appear collapsed in this copy; the
    # two-space ruby body indent is reconstructed from the matching GFM
    # spec example - verify upstream.
    source_markdown = """```ruby
def foo(x)
  return 3
end
```"""
    expected_tokens = [
        "[fcode-block(1,1):`:3:ruby:::::]",
        "[text(2,1):def foo(x)\n  return 3\nend:]",
        "[end-fcode-block::3:False]",
    ]
    expected_gfm = """<pre><code class="language-ruby">def foo(x)
  return 3
end
</code></pre>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_113():
    """
    Test case 113: (part b) An info string can be provided after the opening code fence.
    """
    # Arrange
    # NOTE(review): repeated spaces appear collapsed in this copy; the
    # two-space ruby body indent is reconstructed from the matching GFM
    # spec example - verify upstream.
    source_markdown = """~~~~ ruby startline=3 $%@#$
def foo(x)
  return 3
end
~~~~~~~"""
    expected_tokens = [
        "[fcode-block(1,1):~:4:ruby:: startline=3 $%@#$::: ]",
        "[text(2,1):def foo(x)\n  return 3\nend:]",
        "[end-fcode-block::7:False]",
    ]
    expected_gfm = """<pre><code class="language-ruby">def foo(x)
  return 3
end
</code></pre>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_114():
    """
    Test case 114: (part c) An info string can be provided after the opening code fence.
    (a single ``;`` still becomes the language class)
    """
    # Arrange
    source_markdown = """````;
````"""
    expected_tokens = ["[fcode-block(1,1):`:4:;:::::]", "[end-fcode-block::4:False]"]
    expected_gfm = """<pre><code class="language-;"></code></pre>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_115():
    """
    Test case 115: Info strings for backtick code blocks cannot contain backticks:
    (so the line parses as an inline code span instead)
    """
    # Arrange
    source_markdown = """``` aa ```
foo"""
    expected_tokens = [
        "[para(1,1):\n]",
        "[icode-span(1,1):aa:```: : ]",
        """[text(1,11):
foo::\n]""",
        "[end-para:::True]",
    ]
    expected_gfm = """<p><code>aa</code>
foo</p>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_116():
    """
    Test case 116: Info strings for tilde code blocks can contain backticks and tildes:
    """
    # Arrange
    source_markdown = """~~~ aa ``` ~~~
foo
~~~"""
    expected_tokens = [
        "[fcode-block(1,1):~:3:aa:: ``` ~~~::: ]",
        "[text(2,1):foo:]",
        "[end-fcode-block::3:False]",
    ]
    expected_gfm = """<pre><code class="language-aa">foo
</code></pre>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_117():
    """
    Test case 117: Closing code fences cannot have info strings:
    (``` aaa`` stays inside the block as content)
    """
    # Arrange
    source_markdown = """```
``` aaa
```"""
    expected_tokens = [
        "[fcode-block(1,1):`:3::::::]",
        "[text(2,1):``` aaa:]",
        "[end-fcode-block::3:False]",
    ]
    expected_gfm = """<pre><code>``` aaa
</code></pre>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_extra_01x():
    """
    Test case extra 01: start a "list block" within a fenced code block
    (the list marker is treated as literal code-block text)
    """
    # Arrange
    source_markdown = """```
- some text
some other text
```
"""
    expected_tokens = [
        "[fcode-block(1,1):`:3::::::]",
        "[text(2,1):- some text\nsome other text:]",
        "[end-fcode-block::3:False]",
        "[BLANK(5,1):]",
    ]
    expected_gfm = """<pre><code>- some text
some other text
</code></pre>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_extra_01a():
    """
    Test case extra 01a: variation of extra 01 with a link-reference-like
    list item inside the fenced code block.
    """
    # Arrange
    source_markdown = """```
- [foo]:
/url
```
"""
    expected_tokens = [
        "[fcode-block(1,1):`:3::::::]",
        "[text(2,1):- [foo]:\n/url:]",
        "[end-fcode-block::3:False]",
        "[BLANK(5,1):]",
    ]
    expected_gfm = """<pre><code>- [foo]:
/url
</code></pre>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_extra_02x():
    """
    Test case extra 02: variation of 1 where list already opened
    (second list item closes the never-filled fenced block)
    """
    # Arrange
    source_markdown = """- ```
- some text
some other text
```
"""
    expected_tokens = [
        "[ulist(1,1):-::2::]",
        "[fcode-block(1,3):`:3::::::]",
        "[end-fcode-block:::True]",
        "[li(2,1):2::]",
        "[para(2,3):\n]",
        "[text(2,3):some text\nsome other text::\n]",
        "[end-para:::True]",
        "[end-ulist:::True]",
        "[fcode-block(4,1):`:3::::::]",
        "[text(5,1)::]",
        "[end-fcode-block:::True]",
    ]
    expected_gfm = """<ul>
<li>
<pre><code></code></pre>
</li>
<li>some text
some other text</li>
</ul>
<pre><code></code></pre>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.skip
@pytest.mark.gfm
def test_fenced_code_blocks_extra_02a():
    """
    Test case extra 02a: variation of extra 02 with a link reference
    definition as the second list item (currently skipped).
    """
    # Arrange
    source_markdown = """- ```
- [foo]:
/url
```
"""
    expected_tokens = [
        "[ulist(1,1):-::2::\n]",
        "[fcode-block(1,3):`:3::::::]",
        "[link-ref-def(2,3):True::foo::\n:/url:::::]",
        "[end-ulist:::True]",
        "[fcode-block(4,1):`:3::::::]",
        "[BLANK(5,1):]",
        "[end-fcode-block:::True]",
    ]
    expected_gfm = """<ul>
<li>
<pre><code></code></pre>
</li>
<li></li>
</ul>
<pre><code></code></pre>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_extra_03x():
    """
    Test case extra 03: variation of 1 where list already opened but no new list item
    NOTE: Small change to output to remove newline at pre/code at end.
    """
    # Arrange
    # NOTE(review): leading spaces appear lost in this copy; the 2-space
    # list-continuation indent is reconstructed from text(2,3).
    source_markdown = """- ```
  some text
some other text
```
"""
    expected_tokens = [
        "[ulist(1,1):-::2::  ]",
        "[fcode-block(1,3):`:3::::::]",
        "[text(2,3):some text:]",
        "[end-fcode-block:::True]",
        "[end-ulist:::True]",
        "[para(3,1):]",
        "[text(3,1):some other text:]",
        "[end-para:::False]",
        "[fcode-block(4,1):`:3::::::]",
        "[text(5,1)::]",
        "[end-fcode-block:::True]",
    ]
    expected_gfm = """<ul>
<li>
<pre><code>some text
</code></pre>
</li>
</ul>
<p>some other text</p>
<pre><code></code></pre>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens, show_debug=False)
@pytest.mark.skip
@pytest.mark.gfm
def test_fenced_code_blocks_extra_03a():
    """
    Test case extra 03a: variation of extra 03 with a link reference
    definition as the continuation (currently skipped).
    NOTE(review): the expected tokens reference <script> html blocks that
    do not appear in the source - they look stale; verify before enabling.
    """
    # Arrange
    # NOTE(review): leading spaces appear lost in this copy; the 2-space
    # continuation indent is reconstructed from link-ref-def(2,3).
    source_markdown = """- ```
  [foo]:
/url
```
"""
    expected_tokens = [
        "[ulist(1,1):-::2::  \n\n]",
        "[html-block(1,3)]",
        "[text(1,3):<script>:]",
        "[link-ref-def(2,3):True::foo::\n:/url:::::]",
        "[text(4,1):</script>:]",
        "[end-html-block:::False]",
        "[end-ulist:::True]",
        "[html-block(4,1)]",
        "[text(4,1):</script>:]",
        "[end-html-block:::False]",
        "[BLANK(5,1):]",
    ]
    expected_gfm = """<ul>
<li>
<pre><code>[foo]:
</code></pre>
</li>
</ul>
<p>/url</p>
<pre><code></code></pre>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_extra_04x():
    """
    Test case extra 04: start a "block quote" within a fenced code block
    (the ``>`` is treated as literal code-block text)
    """
    # Arrange
    source_markdown = """```
> some text
some other text
```
"""
    expected_tokens = [
        "[fcode-block(1,1):`:3::::::]",
        "[text(2,1):\a>\a>\a some text\nsome other text:]",
        "[end-fcode-block::3:False]",
        "[BLANK(5,1):]",
    ]
    expected_gfm = """<pre><code>> some text
some other text
</code></pre>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_extra_04a():
    """
    Test case extra 04a: variation of extra 04 with a link-reference-like
    line inside the fenced code block.
    """
    # Arrange
    source_markdown = """```
> [foo]:
/url
```
"""
    expected_tokens = [
        "[fcode-block(1,1):`:3::::::]",
        "[text(2,1):\a>\a>\a [foo]:\n/url:]",
        "[end-fcode-block::3:False]",
        "[BLANK(5,1):]",
    ]
    expected_gfm = """<pre><code>> [foo]:
/url
</code></pre>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_extra_05x():
    """
    Test case extra 05: variation of 4 where a block quote is already
    opened around the fence.
    """
    # Arrange
    source_markdown = """> ```
> some text
some other text
```
"""
    expected_tokens = [
        "[block-quote(1,1)::> \n> ]",
        "[fcode-block(1,3):`:3::::::]",
        "[text(2,3):some text:]",
        "[end-fcode-block:::True]",
        "[end-block-quote:::True]",
        "[para(3,1):]",
        "[text(3,1):some other text:]",
        "[end-para:::False]",
        "[fcode-block(4,1):`:3::::::]",
        "[text(5,1)::]",
        "[end-fcode-block:::True]",
    ]
    expected_gfm = """<blockquote>
<pre><code>some text
</code></pre>
</blockquote>
<p>some other text</p>
<pre><code></code></pre>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_extra_05a():
    """
    Test case extra 05a: variation of extra 05 with a link-reference-like
    line, and no trailing newline.
    NOTE: Small change to output to remove newline at pre/code at end.
    """
    # Arrange
    source_markdown = """> ```
> [foo]:
/url
```"""
    expected_tokens = [
        "[block-quote(1,1)::> \n> ]",
        "[fcode-block(1,3):`:3::::::]",
        "[text(2,3):[foo]::]",
        "[end-fcode-block:::True]",
        "[end-block-quote:::True]",
        "[para(3,1):]",
        "[text(3,1):/url:]",
        "[end-para:::False]",
        "[fcode-block(4,1):`:3::::::]",
        "[end-fcode-block:::True]",
    ]
    expected_gfm = """<blockquote>
<pre><code>[foo]:
</code></pre>
</blockquote>
<p>/url</p>
<pre><code></code></pre>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_extra_06xx():
    """
    Test case extra 06: variation of 5 where block already opened but
    no block character on the continuation lines
    """
    # Arrange
    # NOTE(review): leading spaces appear lost in this copy; the 2-space
    # indent on "some text" is reconstructed from para(2,3).
    source_markdown = """> ```
  some text
some other text
```
"""
    expected_tokens = [
        "[block-quote(1,1)::> ]",
        "[fcode-block(1,3):`:3::::::]",
        "[end-fcode-block:::True]",
        "[end-block-quote:::True]",
        "[para(2,3):  \n]",
        "[text(2,3):some text\nsome other text::\n]",
        "[end-para:::False]",
        "[fcode-block(4,1):`:3::::::]",
        "[text(5,1)::]",
        "[end-fcode-block:::True]",
    ]
    expected_gfm = """<blockquote>
<pre><code></code></pre>
</blockquote>
<p>some text
some other text</p>
<pre><code></code></pre>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_extra_06xa():
    """
    Test case extra 06xa: same as extra 06xx but without a trailing
    newline after the final fence.
    """
    # Arrange
    # NOTE(review): leading spaces appear lost in this copy; the 2-space
    # indent on "some text" is reconstructed from para(2,3).
    source_markdown = """> ```
  some text
some other text
```"""
    expected_tokens = [
        "[block-quote(1,1)::> ]",
        "[fcode-block(1,3):`:3::::::]",
        "[end-fcode-block:::True]",
        "[end-block-quote:::True]",
        "[para(2,3):  \n]",
        "[text(2,3):some text\nsome other text::\n]",
        "[end-para:::False]",
        "[fcode-block(4,1):`:3::::::]",
        "[end-fcode-block:::True]",
    ]
    expected_gfm = """<blockquote>
<pre><code></code></pre>
</blockquote>
<p>some text
some other text</p>
<pre><code></code></pre>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.skip
@pytest.mark.gfm
def test_fenced_code_blocks_extra_06a():
    """
    Test case extra 06a: variation of extra 06 with a link reference
    definition as the continuation (currently skipped).
    """
    # Arrange
    # NOTE(review): leading spaces appear lost in this copy; the 2-space
    # indent on "[foo]:" is reconstructed from link-ref-def(2,3).
    source_markdown = """> ```
  [foo]:
/url
```"""
    expected_tokens = [
        "[block-quote(1,1)::> ]",
        "[fcode-block(1,3):`:3::::::]",
        "[link-ref-def(2,3):True:  :foo::\n:/url:::::]",
        "[fcode-block(4,1):`:3::::::]",
        "[end-fcode-block:::True]",
    ]
    expected_gfm = """<blockquote>
<pre><code></code></pre>
</blockquote>
<pre><code></code></pre>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_extra_07():
    """
    Test case extra 07: mixed block quotes and list blocks
    (all kept as literal text inside the fenced block)
    """
    # Arrange
    # NOTE(review): leading spaces appear lost in this copy; the 2-space
    # list-continuation indent on the quote lines is reconstructed -
    # verify against the upstream test file.
    source_markdown = """```
* a
  > b
  >
* c
```"""
    expected_tokens = [
        "[fcode-block(1,1):`:3::::::]",
        "[text(2,1):* a\n  \a>\a>\a b\n  \a>\a>\a\n* c:]",
        "[end-fcode-block::3:False]",
    ]
    expected_gfm = """<pre><code>* a
  > b
  >
* c
</code></pre>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_extra_08x():
    """
    Test case extra 08: blank line between two text lines inside a
    fenced block.
    """
    # Arrange
    # NOTE(review): blank lines appear lost in this copy; the blank line
    # below is reconstructed from the \x03 marker in the expected tokens.
    source_markdown = """```
abc

def
```"""
    expected_tokens = [
        "[fcode-block(1,1):`:3::::::]",
        "[text(2,1):abc\n\x03\ndef:]",
        "[end-fcode-block::3:False]",
    ]
    expected_gfm = """<pre><code>abc

def
</code></pre>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_extra_08a():
    """
    Test case extra 08a: variation of extra 08 with an additional blank
    line right after the opening fence.
    """
    # Arrange
    # NOTE(review): blank lines appear lost in this copy; the blank lines
    # below are reconstructed from the expected tokens.
    source_markdown = """```

abc

def
```"""
    expected_tokens = [
        "[fcode-block(1,1):`:3::::::]",
        "[text(2,1):\nabc\n\x03\ndef:]",
        "[end-fcode-block::3:False]",
    ]
    expected_gfm = """<pre><code>
abc

def
</code></pre>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens, show_debug=False)
@pytest.mark.gfm
def test_fenced_code_blocks_extra_08b():
    """
    Test case extra 08b: closed fenced block followed by two paragraphs
    and a new, unclosed fence.
    """
    # Arrange
    # NOTE(review): blank lines appear lost in this copy; the blank lines
    # below are reconstructed from the BLANK(4,1)/BLANK(6,1) tokens.
    source_markdown = """```
abc
```

abc

def
```"""
    expected_tokens = [
        "[fcode-block(1,1):`:3::::::]",
        "[text(2,1):abc:]",
        "[end-fcode-block::3:False]",
        "[BLANK(4,1):]",
        "[para(5,1):]",
        "[text(5,1):abc:]",
        "[end-para:::True]",
        "[BLANK(6,1):]",
        "[para(7,1):]",
        "[text(7,1):def:]",
        "[end-para:::False]",
        "[fcode-block(8,1):`:3::::::]",
        "[end-fcode-block:::True]",
    ]
    expected_gfm = """<pre><code>abc
</code></pre>
<p>abc</p>
<p>def</p>
<pre><code></code></pre>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_fenced_code_blocks_extra_08c():
    """
    Test case extra 08c: variation of extra 08 with a space-only first
    line inside the fence.
    """
    # Arrange
    # NOTE(review): blank lines and repeated spaces appear lost in this
    # copy; the literals below are reconstructed from the expected tokens.
    source_markdown = """```
\a\a
abc

def
```""".replace(
        "\a", " "
    )
    expected_tokens = [
        "[fcode-block(1,1):`:3::::::]",
        "[text(2,3):\nabc\n\x03\ndef:  ]",
        "[end-fcode-block::3:False]",
    ]
    expected_gfm = """<pre><code>\a\a
abc

def
</code></pre>""".replace(
        "\a", " "
    )

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens, show_debug=False)
| 20.543931
| 163
| 0.542508
| 5,170
| 40,451
| 4.079884
| 0.055706
| 0.068269
| 0.080311
| 0.05234
| 0.906509
| 0.89698
| 0.887024
| 0.872138
| 0.850898
| 0.840042
| 0
| 0.035196
| 0.240711
| 40,451
| 1,968
| 164
| 20.55437
| 0.65156
| 0.158933
| 0
| 0.75179
| 0
| 0.012729
| 0.384216
| 0.169159
| 0
| 0
| 0
| 0
| 0.055688
| 1
| 0.054893
| false
| 0
| 0.001591
| 0
| 0.059666
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0da59443a8121bd1ee9aca8deaedb8499d4e983f
| 368
|
py
|
Python
|
003-comment-indention/indention.py
|
zaiddashti/python-tutorial
|
9ae325999a79f5f6471e4126995a2219e5ba33a3
|
[
"MIT"
] | null | null | null |
003-comment-indention/indention.py
|
zaiddashti/python-tutorial
|
9ae325999a79f5f6471e4126995a2219e5ba33a3
|
[
"MIT"
] | null | null | null |
003-comment-indention/indention.py
|
zaiddashti/python-tutorial
|
9ae325999a79f5f6471e4126995a2219e5ba33a3
|
[
"MIT"
] | null | null | null |
# Tutorial script demonstrating correct vs. wrong indentation in Python.
# NOTE(review): this copy of the file has lost its original leading
# whitespace, so the "wrong indention" examples below no longer show the
# offending indentation; verify against the upstream tutorial file.

# correct
print("Line 1")

# wrong indention
print("Line 2")
print("Line 3")

# correct indention
if 5 > 4:
    print("It true that 5 > 4")

# wrong indention
if 5 > 4:
    print("It true that 5 > 4")

# wrong indention
if 5 > 4:
    print("It true that 5 > 4")

# wrong indention
if 5 > 4:
    print("It true that 5 > 4")
    print("It true that 5 > 4")
| 12.689655
| 35
| 0.570652
| 62
| 368
| 3.387097
| 0.225806
| 0.085714
| 0.166667
| 0.214286
| 0.72381
| 0.72381
| 0.72381
| 0.72381
| 0.642857
| 0.642857
| 0
| 0.08046
| 0.290761
| 368
| 28
| 36
| 13.142857
| 0.724138
| 0.241848
| 0
| 0.75
| 0
| 0
| 0.397059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.666667
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
0db65363ebef3c3fec2fd7384580928f6b4c86e0
| 27,851
|
py
|
Python
|
ds4se/desc/metrics/java.py
|
rmclanton/ds4se
|
d9e1cf771a66478ac99c5341dbfeddbbf0abe5b2
|
[
"Apache-2.0"
] | null | null | null |
ds4se/desc/metrics/java.py
|
rmclanton/ds4se
|
d9e1cf771a66478ac99c5341dbfeddbbf0abe5b2
|
[
"Apache-2.0"
] | null | null | null |
ds4se/desc/metrics/java.py
|
rmclanton/ds4se
|
d9e1cf771a66478ac99c5341dbfeddbbf0abe5b2
|
[
"Apache-2.0"
] | null | null | null |
# AUTOGENERATED! DO NOT EDIT! File to edit: nbs/6.2_desc.metrics.java.ipynb (unless otherwise specified).
__all__ = ['get_unicode', 'simulate_getting_dataframes_from_mongo', 'add_mccabe_metrics', 'create_parser_builds',
'lang_keywords', 'find_class_nodes', 'find_method_nodes', 'find_field_names', 'find_string_in_text',
'distinct_field_calls', 'calculate_lcom5', 'add_lcom5', 'flatten_lol', 'display_numeric_col_stats',
'display_numeric_col_hist', 'get_unicode', 'simulate_getting_dataframes_from_mongo', 'add_mccabe_metrics',
'create_parser_builds', 'lang_keywords', 'find_class_nodes', 'find_method_nodes', 'find_field_names',
'find_string_in_text', 'distinct_field_calls', 'calculate_lcom5', 'add_lcom5', 'flatten_lol',
'display_numeric_col_stats', 'display_numeric_col_hist']
# Cell
# Imports
import pandas as pd
from numpy import mean, std
from statistics import median
from scipy.stats import sem, t
import lizard
import matplotlib.pyplot as plt
from tree_sitter import Language, Parser, Node
#Decoding files
import chardet
from bs4 import UnicodeDammit
# TODO: Remove when mongo call is implemented
import os
# Cell
'''
Detects file encoding and returns unicode.
Inspired by http://reinvantveer.github.io/2017/05/19/unicode-dammit.html
:param file_path: file path of file
:returns: unicode string of the file
:raises ValueError: empty or invalud csv file
'''
def get_unicode(file_path):
    """Detect *file_path*'s encoding with chardet and return its text.

    :param file_path: path of the file to read
    :returns: tuple of (unicode markup via bs4's UnicodeDammit, encoding name)
    :raises ValueError: if the file is empty or cannot be decoded
    """
    # First pass: read raw bytes only to let chardet guess the encoding.
    with open(file_path, 'rb') as f:
        detection = chardet.detect(f.read())
    enc = detection["encoding"]
    if detection["encoding"] == "ascii":
        with open(file_path, encoding="ascii") as f:
            data = f.read()
    elif detection["encoding"] == "ISO-8859-9":
        # NOTE(review): ISO-8859-9 detections are deliberately re-read (and
        # reported) as utf-8 - presumably a workaround for chardet
        # misdetections; confirm before changing.
        with open(file_path, encoding="utf-8") as f:
            enc = "utf-8"
            data = f.read()
    else:
        try:
            # Try to open as non unicode file
            with open(file_path, encoding=detection["encoding"]) as f:
                data = f.read()
        except Exception as e:
            raise ValueError(f"Cannot return dictionary from empty or invalid csv file {file_path} due to {e}")
    # An empty file yields no data regardless of encoding - treat as invalid.
    if not data:
        raise ValueError(f"Cannot return dictionary from empty or invalid csv file {file_path}")
    return UnicodeDammit(data).unicode_markup, enc
# Cell
def simulate_getting_dataframes_from_mongo(folder_path):
    """Load every regular file under *folder_path* into a pandas DataFrame.

    Directories and macOS ``.DS_Store`` entries are skipped.  Each row gets
    a ``None`` system, the file name, a fixed ``"src"`` ground truth, the
    decoded contents, and the detected encoding.

    :param folder_path: path to folder
    :returns: corpus_data dataframe
    """
    columns = {"system": [], "name": [], "ground_truth": [], "contents": [], "encoding": []}
    for entry in os.listdir(folder_path):
        entry_path = os.path.join(folder_path, entry)
        # Skip sub-directories and macOS metadata files.
        if os.path.isdir(entry_path) or entry == ".DS_Store":
            continue
        columns["system"].append(None)
        columns["name"].append(entry)
        columns["ground_truth"].append("src")
        decoded, detected_encoding = get_unicode(entry_path)
        columns["encoding"].append(detected_encoding)
        columns["contents"].append(decoded)
    return pd.DataFrame(data=columns)
# Cell
'''
Adds information about function length and cyclomatic complexity for classes to a dataframe
:param df: dataframe
:param data_col: data
:param name_col: name
:returns: dataframe with added columns
'''
def add_mccabe_metrics(df, data_col, name_col):
    """Append per-class McCabe metric columns to *df* (mutated in place).

    For every row, the source in ``df[data_col]`` is analyzed with lizard;
    functions are grouped by their owning class (the ``::``-joined prefix
    of the lizard function name) and per-class aggregates are collected.

    :param df: dataframe holding one source file per row
    :param data_col: column with the file contents
    :param name_col: column with the file name (passed to lizard)
    :returns: the same dataframe with list-valued columns num_funcs,
        class_ccn, avg_func_ccn and avg_func_nloc added
    """
    num_funcs = []
    class_ccn = []
    avg_func_ccn = []
    avg_func_nloc = []
    for i in range(len(df)):
        file_num_funcs = []
        file_class_ccn = []
        file_avg_func_ccn = []
        file_avg_func_nloc = []
        metrics = lizard.analyze_file.analyze_source_code(df[name_col][i], df[data_col][i])
        # Group lizard's function records by their enclosing class name
        # (everything before the last "::" in the qualified name).
        class_dict = {}
        for func in metrics.function_list:
            class_name = '::'.join(func.name.split("::")[:-1])
            if class_name in class_dict:
                class_dict[class_name].append(func)
            else:
                class_dict[class_name] = [func]
        # Aggregate cyclomatic complexity and length per class.
        for class_key in class_dict:
            total_class_ccn = 0
            total_class_nloc = 0
            for func in class_dict[class_key]:
                total_class_ccn += func.cyclomatic_complexity
                total_class_nloc += func.length
            file_num_funcs.append(len(class_dict[class_key]))
            file_class_ccn.append(total_class_ccn)
            file_avg_func_ccn.append(total_class_ccn/len(class_dict[class_key]))
            file_avg_func_nloc.append(total_class_nloc/len(class_dict[class_key]))
        num_funcs.append(file_num_funcs)
        class_ccn.append(file_class_ccn)
        avg_func_ccn.append(file_avg_func_ccn)
        avg_func_nloc.append(file_avg_func_nloc)
    df["num_funcs"] = num_funcs
    df["class_ccn"] = class_ccn
    df["avg_func_ccn"] = avg_func_ccn
    df["avg_func_nloc"] = avg_func_nloc
    return df
# Cell
'''
Creates a dictionary of tree-sitter parsers for select languages
:param path: None
:returns: dictionary with java and java language
'''
def create_parser_builds(path=None):
    """Compile the tree-sitter language library and return a language map.

    Side effect: writes the compiled library to ``build/my-languages.so``
    (expects a ``tree-sitter-java`` checkout in the working directory).

    :param path: unused - kept for interface compatibility
    :returns: dict mapping ``"java"`` to its tree_sitter Language object
    """
    Language.build_library(
        # Store the library in the `build` directory
        'build/my-languages.so',
        # Include one or more languages
        [
            'tree-sitter-java'
        ]
    )
    JAVA_LANGUAGE = Language('build/my-languages.so', 'java')
    return {"java":JAVA_LANGUAGE}
# Cell
def lang_keywords():
    """Return the per-language AST node-type names used by the metric helpers.

    :returns: dict mapping a language key (currently only "java") to the
        node-type names for classes, methods, field declarations and
        field-name identifiers.
    """
    return {
        "java": {
            "class": "class_declaration",
            "method": "method_declaration",
            "field_dec": "field_declaration",
            "field_name": "identifier",
        }
    }
# Cell
def find_class_nodes(root_node, key):
    """Collect every class-declaration node in *root_node*'s subtree.

    The whole tree is walked (nested classes are found too) and matches
    are returned in pre-order.

    :param root_node: root node to begin search
    :param key: keyword dict whose "class" entry names the class node type
    :returns: list of class nodes
    """
    class_kind = key["class"]
    found = []
    # Explicit stack instead of recursion; children are pushed reversed so
    # the traversal order matches a left-to-right pre-order walk.
    pending = [root_node]
    while pending:
        current = pending.pop()
        if current.type == class_kind:
            found.append(current)
        pending.extend(reversed(current.children))
    return found
# Cell
def find_method_nodes(class_node, key):
    """Collect the method-declaration nodes that belong to *class_node*.

    Nested class declarations are not descended into, so methods of inner
    classes are excluded.  Matches are returned in pre-order.

    :param class_node: class node whose children are searched
    :param key: keyword dict with "method" and "class" node-type names
    :returns: list of method nodes
    """
    method_kind = key["method"]
    class_kind = key["class"]
    found = []
    # Start from the class's children; pushing reversed keeps pre-order.
    pending = list(reversed(class_node.children))
    while pending:
        current = pending.pop()
        if current.type == method_kind:
            found.append(current)
        # Do not descend into nested classes.
        if current.type != class_kind:
            pending.extend(reversed(current.children))
    return found
# Cell
def find_field_names(class_node, file_bytes, key):
    """Collect the field/attribute names declared directly in *class_node*.

    Field declarations inside nested classes are skipped.  Each name is
    returned as the list of byte values spanned by its identifier node.

    :param class_node: class node whose children are searched
    :param file_bytes: byte list of the source file
    :param key: keyword dict with "field_dec", "field_name" and "class"
        node-type names
    :returns: list of byte-value lists, one per field name
    """
    field_kind = key["field_dec"]
    name_kind = key["field_name"]
    class_kind = key["class"]
    names = []

    def collect_identifiers(node):
        # First identifier-typed descendant(s) give the field's name bytes.
        if node.type == name_kind:
            names.append([file_bytes[i] for i in range(node.start_byte, node.end_byte)])
        else:
            for child in node.children:
                collect_identifiers(child)

    def walk(node):
        if node.type == field_kind:
            collect_identifiers(node)
        # Do not descend into nested classes.
        if node.type != class_kind:
            for child in node.children:
                walk(child)

    for child in class_node.children:
        walk(child)
    return names
# Cell
def find_string_in_text(node, pattern, file_bytes):
    """Count leaf nodes under *node* whose byte span equals *pattern*.

    Interior nodes recurse over their children and sum the counts; a leaf
    contributes 1 when the bytes at [start_byte, end_byte) match the
    pattern list, else 0.  Out-of-range indices are reported on stdout
    and skipped rather than raised.

    :param node: AST node to inspect
    :param pattern: byte-value list to look for
    :param file_bytes: byte list of the source file
    :returns: number of matching leaves
    """
    children = node.children
    if len(children) > 0:
        return sum(find_string_in_text(child, pattern, file_bytes) for child in children)
    token = []
    for index in range(node.start_byte, node.end_byte):
        failures = 0
        try:
            token.append(file_bytes[index])
        except IndexError:
            failures += 1
        if(failures):
            print(f"INDEX ERROR ({failures} times)")
            print("Start byte:", node.start_byte, "End byte:", node.end_byte, "Word:", token)
    return 1 if token == pattern else 0
# Cell
def distinct_field_calls(class_node, field_names, file_bytes, key):
    """Count the method nodes directly belonging to *class_node*.

    For each method (nested classes excluded) the number of distinct
    fields referenced inside it is computed via find_string_in_text.
    NOTE(review): those per-method counts are collected but only their
    COUNT (i.e. the number of methods) is returned - confirm whether a
    sum of the distinct accesses was intended for the LCOM5 formula.

    :param class_node: class node whose methods are searched
    :param field_names: list of field names as byte-value lists
    :param file_bytes: byte list of the source file
    :param key: keyword dict with "method" and "class" node-type names
    :returns: number of method nodes found
    """
    method_kind = key["method"]
    class_kind = key["class"]
    per_method_counts = []

    def walk(node):
        if node.type == method_kind:
            referenced = 0
            for field in field_names:
                if find_string_in_text(node, field, file_bytes):
                    referenced += 1
            per_method_counts.append(referenced)
        # Do not descend into nested classes.
        if node.type != class_kind:
            for child in node.children:
                walk(child)

    for child in class_node.children:
        walk(child)
    return len(per_method_counts)
# Cell
'''
Parses the syntax tree of code to calculate the LCOM5 of its classes
:param tree: syntax tree
:param extension: file extension type
:param file_bytes: list of bytes
:param name: not used
:returns: LCOM5 list
'''
def calculate_lcom5(tree, extension, file_bytes, name):
    """Calculate the LCOM5 (lack of cohesion) metric for every class in a syntax tree.

    :param tree: tree-sitter syntax tree of the file
    :param extension: file extension, used to select language keywords
    :param file_bytes: byte sequence of the source file
    :param name: file name (unused, kept for interface compatibility)
    :returns: list with one LCOM5 value (or "Undefined") per class
    """
    keyword_dict = lang_keywords()
    if extension not in keyword_dict:
        print(f"Tried to get LCOM5 of file with unsupported extension '.{extension}', 0 assigned to column.")
        return ["Undefined"]
    keywords = keyword_dict[extension]
    class_nodes = find_class_nodes(tree.root_node, keywords)
    lcom5_list = []
    # LCOM5 = (a - m*f) / (f - m*f), with m = #methods, f = #fields and
    # a = total distinct field accesses over all methods. The previous
    # version used `for node in enumerate(...)` with node[0]/node[1]
    # indexing and three parallel lists; iterate the classes directly.
    for class_node in class_nodes:
        num_meths = len(find_method_nodes(class_node, keywords))
        field_names = find_field_names(class_node, file_bytes, keywords)
        num_fields = len(field_names)
        num_dac = distinct_field_calls(class_node, field_names, file_bytes, keywords)
        denominator = num_fields - (num_meths * num_fields)
        if denominator == 0:
            # Zero or one method, or no fields: LCOM5 is not defined.
            lcom5_list.append("Undefined")
        else:
            numerator = num_dac - (num_meths * num_fields)
            lcom5_list.append(numerator / denominator)
    return lcom5_list
# Cell
'''
Adds a column with the LCOM5 of each class of each file to a dataframe
:param df: input dataframe
:param col: not used
:returns: updated dataframe
'''
def add_lcom5(df, col):
    """Add a "class_lcom5" column (one LCOM5 value per class) to a dataframe of files.

    :param df: dataframe with "name", "contents" and "encoding" columns
    :param col: unused, kept for interface compatibility
    :returns: the dataframe with the added "class_lcom5" column
    """
    lang_builds = create_parser_builds()
    parser = Parser()
    class_lcom5 = []
    for i in range(len(df)):
        ext = df["name"][i].split('.')[-1]
        # BUG FIX: unsupported extensions used to raise KeyError on
        # lang_builds[ext] before calculate_lcom5's graceful fallback could
        # run; mirror that fallback here instead of crashing.
        if ext not in lang_builds:
            print(f"Tried to get LCOM5 of file with unsupported extension '.{ext}', 0 assigned to column.")
            class_lcom5.append(["Undefined"])
            continue
        parser.set_language(lang_builds[ext])
        # Encode once and reuse for both parsing and byte lookups.
        file_bytes = bytes(df["contents"][i], df["encoding"][i])
        tree = parser.parse(file_bytes)
        class_lcom5.append(calculate_lcom5(tree, ext, file_bytes, df["name"][i]))
    df["class_lcom5"] = class_lcom5
    return df
# Cell
'''
Takes in a list of lists and flattens it, returning a list of each entry
:param list_list: list of lists to flatten
:returns: flattened list
'''
def flatten_lol(list_list):
    """Flatten a list of lists into a single list of entries.

    :param list_list: list of lists to flatten
    :returns: flattened list
    """
    return [entry for sublist in list_list for entry in sublist]
# Cell
'''
Computes statistical metrics about the entries in a dataframe column or list
:param col: dataframe column or list
:param conf: confidence level
:param sig_figs: significant figures for rounding
:param clean: only look at ints, floats, or complex in col
:param verbose_clean: displays number of non-numeric entries removed
'''
def display_numeric_col_stats(col, conf = 0.95, sig_figs = 4, clean=True, verbose_clean=False):
    """Print summary statistics and a confidence interval for a numeric column.

    :param col: dataframe column or list of values
    :param conf: confidence level for the interval
    :param sig_figs: number of digits used when rounding printed values
    :param clean: drop entries that are not int, float or complex
    :param verbose_clean: report how many entries were dropped
    """
    original_size = len(col)
    if clean:
        col = [entry for entry in col if type(entry) in (int, float, complex)]
    if verbose_clean:
        print(f"Cleaning removed {original_size - len(col)} non-numeric entries")
    if len(col) < 1:
        print("Error, data must contain at least one valid entry to display statistics")
        return
    avg = mean(col)
    print("Min =", round(min(col), sig_figs))
    print("Max =", round(max(col), sig_figs))
    print("Average =", round(avg, sig_figs))
    print("Median =", round(median(col), sig_figs))
    print("Standard Deviation =", round(std(col), sig_figs))
    # Two-sided Student-t confidence interval around the mean.
    half_width = sem(col) * t.ppf((1 + conf) / 2, len(col) - 1)
    lower, upper = avg - half_width, avg + half_width
    print(f"{conf} of data points fall between {round(lower, sig_figs)} and {round(upper, sig_figs)}")
# Cell
'''
Displays a histogram with a customized number of bins for the data in a specified dataframe column or list
:param col: df column or list to plot
:param col_name: name of col for labeling
:param num_bins: number of bins
:param clean: only look at ints, floats, or complex in col
:param verbose_clean: displays non-numeric entries removed
'''
def display_numeric_col_hist(col, col_name="Metric", num_bins=20, clean=True, verbose_clean=False):
    """Display a histogram of the numeric values in a dataframe column or list.

    :param col: dataframe column or list to plot
    :param col_name: column name used for the title and x-axis label
    :param num_bins: number of histogram bins
    :param clean: drop entries that are not int, float or complex
    :param verbose_clean: report how many entries were dropped
    """
    previous_length = len(col)
    numeric_types = [int, float, complex]
    if clean: col = [x for x in col if type(x) in numeric_types]
    if verbose_clean: print(f"Cleaning removed {previous_length - len(col)} non-numeric entries")
    if len(col) < 1:
        print("Error, data must contain at least one valid entry to display histogram")
        return
    # Removed unused locals (value range, count, standard deviation) that
    # were computed but never used by the plot.
    plt.hist(col, num_bins, color="blue", alpha=0.5, edgecolor="black", linewidth=1.0)
    plt.title(col_name + " Histogram")
    plt.ylabel("Value Range Occurrences")
    plt.xlabel(col_name)
    plt.show()
# Cell
# Imports
import pandas as pd
from numpy import mean, std
from statistics import median
from scipy.stats import sem, t
import lizard
import matplotlib.pyplot as plt
from tree_sitter import Language, Parser, Node
#Decoding files
import chardet
from bs4 import UnicodeDammit
# TODO: Remove when mongo call is implemented
import os
# Cell
'''
Detects file encoding and returns unicode.
Inspired by http://reinvantveer.github.io/2017/05/19/unicode-dammit.html
:param file_path: file path of file
:returns: unicode string of the file
:raises ValueError: empty or invalid csv file
'''
def get_unicode(file_path):
    """Detect a file's encoding and return its contents as unicode.

    Inspired by http://reinvantveer.github.io/2017/05/19/unicode-dammit.html

    :param file_path: path of the file to read
    :returns: tuple of (unicode string of the file contents, encoding used)
    :raises ValueError: if the file is empty or cannot be decoded
    """
    with open(file_path, 'rb') as f:
        # Guess the encoding from the raw bytes with chardet.
        detection = chardet.detect(f.read())
    enc = detection["encoding"]
    if detection["encoding"] == "ascii":
        with open(file_path, encoding="ascii") as f:
            data = f.read()
    elif detection["encoding"] == "ISO-8859-9":
        # NOTE(review): ISO-8859-9 detections are deliberately re-read as
        # UTF-8 — presumably a chardet misdetection workaround; confirm.
        with open(file_path, encoding="utf-8") as f:
            enc = "utf-8"
            data = f.read()
    else:
        try:
            # Try to open as non unicode file
            with open(file_path, encoding=detection["encoding"]) as f:
                data = f.read()
        except Exception as e:
            raise ValueError(f"Cannot return dictionary from empty or invalid csv file {file_path} due to {e}")
    if not data:
        raise ValueError(f"Cannot return dictionary from empty or invalid csv file {file_path}")
    # UnicodeDammit normalises whatever was read into proper unicode.
    return UnicodeDammit(data).unicode_markup, enc
# Cell
'''
Loads files from a specified folder into a pandas dataframe
:param folder_path: path to folder
:returns: corpus_data dataframe
'''
def simulate_getting_dataframes_from_mongo(folder_path):
    """Load the files of a folder into a pandas dataframe (stand-in for a mongo query).

    :param folder_path: path of the folder to read
    :returns: dataframe with system/name/ground_truth/contents/encoding columns
    """
    rows = {"system": [], "name": [], "ground_truth": [], "contents": [], "encoding": []}
    for entry in os.listdir(folder_path):
        full_path = os.path.join(folder_path, entry)
        # Skip sub-directories and macOS metadata files.
        if os.path.isdir(full_path) or entry == ".DS_Store":
            continue
        contents, enc = get_unicode(full_path)
        rows["system"].append(None)
        rows["name"].append(entry)
        rows["ground_truth"].append("src")
        rows["contents"].append(contents)
        rows["encoding"].append(enc)
    return pd.DataFrame(data=rows)
# Cell
'''
Adds information about function length and cyclomatic complexity for classes to a dataframe
:param df: dataframe
:param data_col:
:param name_col:
:returns: dataframe with added columns
'''
def add_mccabe_metrics(df, data_col, name_col):
    """Add per-class function count, cyclomatic complexity and length columns.

    For every file, lizard's function records are grouped by their enclosing
    class name and four new columns are added: "num_funcs", "class_ccn",
    "avg_func_ccn" and "avg_func_nloc" — each a list with one value per class.

    :param df: dataframe of files
    :param data_col: column holding the source code of each file
    :param name_col: column holding the file name of each file
    :returns: the dataframe with the added columns
    """
    num_funcs = []
    class_ccn = []
    avg_func_ccn = []
    avg_func_nloc = []
    for i in range(len(df)):
        metrics = lizard.analyze_file.analyze_source_code(df[name_col][i], df[data_col][i])
        # Group lizard's function records by their enclosing class name
        # (everything before the last '::'); setdefault replaces the
        # previous manual if/else grouping.
        class_dict = {}
        for func in metrics.function_list:
            class_name = '::'.join(func.name.split("::")[:-1])
            class_dict.setdefault(class_name, []).append(func)
        file_num_funcs = []
        file_class_ccn = []
        file_avg_func_ccn = []
        file_avg_func_nloc = []
        for funcs in class_dict.values():
            total_class_ccn = sum(f.cyclomatic_complexity for f in funcs)
            total_class_nloc = sum(f.length for f in funcs)
            file_num_funcs.append(len(funcs))
            file_class_ccn.append(total_class_ccn)
            file_avg_func_ccn.append(total_class_ccn / len(funcs))
            file_avg_func_nloc.append(total_class_nloc / len(funcs))
        num_funcs.append(file_num_funcs)
        class_ccn.append(file_class_ccn)
        avg_func_ccn.append(file_avg_func_ccn)
        avg_func_nloc.append(file_avg_func_nloc)
    df["num_funcs"] = num_funcs
    df["class_ccn"] = class_ccn
    df["avg_func_ccn"] = avg_func_ccn
    df["avg_func_nloc"] = avg_func_nloc
    return df
# Cell
'''
Creates a dictionary of tree-sitter parsers for select languages
:param path: None
:returns: dictionary mapping language names to tree-sitter Language builds (currently only Java)
'''
def create_parser_builds(path=None):
    """Creates a dictionary of tree-sitter parsers for select languages"""
    # Compile the grammar(s) into a shared library under ./build; the call
    # writes to the local filesystem on first use.
    Language.build_library(
        # Store the library in the `build` directory
        'build/my-languages.so',
        # Include one or more languages
        [
            'tree-sitter-java'
        ]
    )
    JAVA_LANGUAGE = Language('build/my-languages.so', 'java')
    # NOTE(review): `path` is currently unused — presumably intended to
    # override the build location; confirm before removing.
    return {"java":JAVA_LANGUAGE}
# Cell
'''
Returns language specific keywords for a parser to find
'''
def lang_keywords():
    """Return the language-specific AST node-type names the parser searches for.

    :returns: dictionary mapping file extension to its keyword dictionary
    """
    return {
        "java": {
            "class": "class_declaration",
            "method": "method_declaration",
            "field_dec": "field_declaration",
            "field_name": "identifier",
        }
    }
# Cell
'''
Recursively searches an AST for class nodes
:param root_node: root node to begin search
:param key: key to search
:returns: list of class nodes
'''
def find_class_nodes(root_node, key):
    """Search an AST for class nodes.

    :param root_node: node to begin the search from
    :param key: language keyword dictionary (needs "class")
    :returns: list of class nodes in preorder (nested classes included)
    """
    found = []
    stack = [root_node]
    while stack:
        current = stack.pop()
        if current.type == key["class"]:
            found.append(current)
        # Push children reversed so they are visited left-to-right,
        # matching the original recursive preorder.
        stack.extend(reversed(current.children))
    return found
# Cell
'''
Recursively searches an AST for method nodes
:param class_node: class node whose children are searched
:param key: key to search
:returns: list of method nodes
'''
def find_method_nodes(class_node, key):
    """Search the body of a class AST for method nodes.

    Does not descend into nested class declarations, so only the methods of
    this class (not of inner classes) are returned.

    :param class_node: class node whose children are searched
    :param key: language keyword dictionary (needs "method" and "class")
    :returns: list of method nodes
    """
    methods = []

    def visit(node):
        if node.type == key["method"]:
            methods.append(node)
        if node.type == key["class"]:
            return  # stop at nested classes
        for child in node.children:
            visit(child)

    for child in class_node.children:
        visit(child)
    return methods
# Cell
'''
Finds the fields/attributes for a class AST
:param class_node: class node to search
:param file_bytes: list of bytes
:param key: key to search
:returns: list of class fields
'''
def find_field_names(class_node, file_bytes, key):
    """Collect the byte values of every field/attribute name declared in a class AST.

    :param class_node: tree-sitter class node to search
    :param file_bytes: byte sequence of the source file
    :param key: language keyword dictionary (needs "field_name", "field_dec", "class")
    :returns: list of fields, each a list of byte values
    """
    fields = []

    def collect_names(node):
        # A matching node is a field name; slice its bytes out of the file.
        if node.type == key["field_name"]:
            fields.append([file_bytes[b] for b in range(node.start_byte, node.end_byte)])
        else:
            for sub in node.children:
                collect_names(sub)

    def walk(node):
        # Harvest names from field declarations; stop descending at nested classes.
        if node.type == key["field_dec"]:
            collect_names(node)
        if node.type != key["class"]:
            for sub in node.children:
                walk(sub)

    for top in class_node.children:
        walk(top)
    return fields
# Cell
'''
Counts the number of occurrences of a byte-pattern array in a sample of code
:param node: AST node whose subtree is searched
:param pattern: pattern to look for
:param file_bytes: byte-array
:returns: number of occurrences
:note: IndexError during byte lookup is caught and reported, not raised
'''
def find_string_in_text(node, pattern, file_bytes):
    """Count occurrences of a byte-pattern in the leaves of an AST subtree.

    :param node: AST node whose subtree is searched
    :param pattern: list of byte values to look for
    :param file_bytes: byte sequence of the source file
    :returns: number of leaf nodes whose text equals the pattern
    """
    if len(node.children) > 0:
        count = 0
        for child in node.children:
            count += find_string_in_text(child, pattern, file_bytes)
        return count
    word = []
    # BUG FIX: the failure counter used to be re-initialised on every loop
    # iteration, so the reported count could never exceed 1; count them all
    # and report once after the loop.
    num_index_fails = 0
    for i in range(node.start_byte, node.end_byte):
        try:
            word.append(file_bytes[i])
        except IndexError:
            num_index_fails += 1
    if num_index_fails:
        print(f"INDEX ERROR ({num_index_fails} times)")
        print("Start byte:", node.start_byte, "End byte:", node.end_byte, "Word:", word)
    if word == pattern:
        return 1
    else:
        return 0
# Cell
'''
Counts the distinct field accesses made by each method of a class AST
:param class_node: class node to search
:param field_names: list of field-name byte patterns
:param file_bytes: byte array
:param key: key to search
:returns: number of total distinct calls
'''
def distinct_field_calls(class_node, field_names, file_bytes, key):
    """Count the total distinct field accesses across a class's methods.

    For every method of the class, counts how many distinct fields the method
    references at least once, and returns the sum over all methods (the "a"
    term of the LCOM5 formula).

    :param class_node: tree-sitter class node to search
    :param field_names: list of field-name byte patterns (from find_field_names)
    :param file_bytes: byte sequence of the source file
    :param key: language keyword dictionary (needs "method" and "class")
    :returns: total number of distinct field accesses over all methods
    """
    per_method_calls = []

    def rec_method_search(node):
        if node.type == key["method"]:
            # One count per field that appears at least once in this method.
            distinct = sum(
                1 for field in field_names
                if find_string_in_text(node, field, file_bytes)
            )
            per_method_calls.append(distinct)
        if node.type != key["class"]:
            for child in node.children:
                rec_method_search(child)

    for node in class_node.children:
        rec_method_search(node)
    # BUG FIX: previously returned len(per_method_calls) — the number of
    # methods — which made LCOM5 independent of actual field usage. The
    # metric needs the summed distinct accesses.
    return sum(per_method_calls)
# Cell
'''
Parses the syntax tree of code to calculate the LCOM5 of its classes
:param tree: syntax tree
:param extension: file extension type
:param file_bytes: list of bytes
:param name: not used
:returns: LCOM5 list
'''
def calculate_lcom5(tree, extension, file_bytes, name):
    """Calculate the LCOM5 (lack of cohesion) metric for every class in a syntax tree.

    :param tree: tree-sitter syntax tree of the file
    :param extension: file extension, used to select language keywords
    :param file_bytes: byte sequence of the source file
    :param name: file name (unused, kept for interface compatibility)
    :returns: list with one LCOM5 value (or "Undefined") per class
    """
    keyword_dict = lang_keywords()
    if extension not in keyword_dict:
        print(f"Tried to get LCOM5 of file with unsupported extension '.{extension}', 0 assigned to column.")
        return ["Undefined"]
    keywords = keyword_dict[extension]
    class_nodes = find_class_nodes(tree.root_node, keywords)
    lcom5_list = []
    # LCOM5 = (a - m*f) / (f - m*f), with m = #methods, f = #fields and
    # a = total distinct field accesses over all methods. The previous
    # version used `for node in enumerate(...)` with node[0]/node[1]
    # indexing and three parallel lists; iterate the classes directly.
    for class_node in class_nodes:
        num_meths = len(find_method_nodes(class_node, keywords))
        field_names = find_field_names(class_node, file_bytes, keywords)
        num_fields = len(field_names)
        num_dac = distinct_field_calls(class_node, field_names, file_bytes, keywords)
        denominator = num_fields - (num_meths * num_fields)
        if denominator == 0:
            # Zero or one method, or no fields: LCOM5 is not defined.
            lcom5_list.append("Undefined")
        else:
            numerator = num_dac - (num_meths * num_fields)
            lcom5_list.append(numerator / denominator)
    return lcom5_list
# Cell
'''
Adds a column with the LCOM5 of each class of each file to a dataframe
:param df: input dataframe
:param col: not used
:returns: updated dataframe
'''
def add_lcom5(df, col):
    """Add a "class_lcom5" column (one LCOM5 value per class) to a dataframe of files.

    :param df: dataframe with "name", "contents" and "encoding" columns
    :param col: unused, kept for interface compatibility
    :returns: the dataframe with the added "class_lcom5" column
    """
    lang_builds = create_parser_builds()
    parser = Parser()
    class_lcom5 = []
    for i in range(len(df)):
        ext = df["name"][i].split('.')[-1]
        # BUG FIX: unsupported extensions used to raise KeyError on
        # lang_builds[ext] before calculate_lcom5's graceful fallback could
        # run; mirror that fallback here instead of crashing.
        if ext not in lang_builds:
            print(f"Tried to get LCOM5 of file with unsupported extension '.{ext}', 0 assigned to column.")
            class_lcom5.append(["Undefined"])
            continue
        parser.set_language(lang_builds[ext])
        # Encode once and reuse for both parsing and byte lookups.
        file_bytes = bytes(df["contents"][i], df["encoding"][i])
        tree = parser.parse(file_bytes)
        class_lcom5.append(calculate_lcom5(tree, ext, file_bytes, df["name"][i]))
    df["class_lcom5"] = class_lcom5
    return df
# Cell
'''
Takes in a list of lists and flattens it, returning a list of each entry
:param list_list: list of lists to flatten
:returns: flattened list
'''
def flatten_lol(list_list):
    """Flatten a list of lists into a single list of entries.

    :param list_list: list of lists to flatten
    :returns: flattened list
    """
    return [entry for sublist in list_list for entry in sublist]
# Cell
'''
Computes statistical metrics about the entries in a dataframe column or list
:param col: dataframe column or list
:param conf: confidence level
:param sig_figs: sig_figs for rounding
:param clean: only look at ints, floats, or complex in col
:param verbose_clean: displays non-numeric entries removed
'''
def display_numeric_col_stats(col, conf = 0.95, sig_figs = 4, clean=True, verbose_clean=False):
    """Print summary statistics and a confidence interval for a numeric column.

    :param col: dataframe column or list of values
    :param conf: confidence level for the interval
    :param sig_figs: number of digits used when rounding printed values
    :param clean: drop entries that are not int, float or complex
    :param verbose_clean: report how many entries were dropped
    """
    original_size = len(col)
    if clean:
        col = [entry for entry in col if type(entry) in (int, float, complex)]
    if verbose_clean:
        print(f"Cleaning removed {original_size - len(col)} non-numeric entries")
    if len(col) < 1:
        print("Error, data must contain at least one valid entry to display statistics")
        return
    avg = mean(col)
    print("Min =", round(min(col), sig_figs))
    print("Max =", round(max(col), sig_figs))
    print("Average =", round(avg, sig_figs))
    print("Median =", round(median(col), sig_figs))
    print("Standard Deviation =", round(std(col), sig_figs))
    # Two-sided Student-t confidence interval around the mean.
    half_width = sem(col) * t.ppf((1 + conf) / 2, len(col) - 1)
    lower, upper = avg - half_width, avg + half_width
    print(f"{conf} of data points fall between {round(lower, sig_figs)} and {round(upper, sig_figs)}")
# Cell
'''
Displays a histogram with a customized number of bins for the data in a specified dataframe column or list
:param col: df column or list to plot
:param col_name: name of col
:param num_bins: number of bins
:param clean: only look at ints, floats, or complex in col
:param verbose_clean: displays non-numeric entries removed
'''
def display_numeric_col_hist(col, col_name="Metric", num_bins=20, clean=True, verbose_clean=False):
    """Display a histogram of the numeric values in a dataframe column or list.

    :param col: dataframe column or list to plot
    :param col_name: column name used for the title and x-axis label
    :param num_bins: number of histogram bins
    :param clean: drop entries that are not int, float or complex
    :param verbose_clean: report how many entries were dropped
    """
    previous_length = len(col)
    numeric_types = [int, float, complex]
    if clean: col = [x for x in col if type(x) in numeric_types]
    if verbose_clean: print(f"Cleaning removed {previous_length - len(col)} non-numeric entries")
    if len(col) < 1:
        print("Error, data must contain at least one valid entry to display histogram")
        return
    # Removed unused locals (value range, count, standard deviation) that
    # were computed but never used by the plot.
    plt.hist(col, num_bins, color="blue", alpha=0.5, edgecolor="black", linewidth=1.0)
    plt.title(col_name + " Histogram")
    plt.ylabel("Value Range Occurrences")
    plt.xlabel(col_name)
    plt.show()
| 32.123414
| 148
| 0.662849
| 3,926
| 27,851
| 4.496943
| 0.08813
| 0.015293
| 0.009063
| 0.011781
| 0.990937
| 0.990937
| 0.990937
| 0.990937
| 0.990937
| 0.990937
| 0
| 0.00636
| 0.2322
| 27,851
| 867
| 149
| 32.123414
| 0.819257
| 0.027898
| 0
| 0.974257
| 1
| 0.00396
| 0.142353
| 0.012129
| 0
| 0
| 0
| 0.002307
| 0
| 1
| 0.079208
| false
| 0
| 0.039604
| 0
| 0.190099
| 0.051485
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2179e470aa70227aee8187be0cd9e6194ab23f15
| 14,817
|
py
|
Python
|
regular_language/unit_tests/test_ast_nonclass_functions.py
|
ShoYamanishi/nlpregex
|
795b36d5a2fad8bc25264b2093ffa9c3723b282b
|
[
"MIT"
] | 1
|
2021-12-03T07:20:18.000Z
|
2021-12-03T07:20:18.000Z
|
regular_language/unit_tests/test_ast_nonclass_functions.py
|
ShoYamanishi/nlpregex
|
795b36d5a2fad8bc25264b2093ffa9c3723b282b
|
[
"MIT"
] | null | null | null |
regular_language/unit_tests/test_ast_nonclass_functions.py
|
ShoYamanishi/nlpregex
|
795b36d5a2fad8bc25264b2093ffa9c3723b282b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Unit Tests for ast.py."""
import unittest
import nlpregex.regular_language.ast
class test_cross_product( unittest.TestCase ):
    """Unit tests for nlpregex.regular_language.ast.cross_product().

    cross_product(list01, list02, sep) joins every element of list01 with
    every element of list02 using sep; an empty list on either side yields
    the other list unchanged, and an empty-string element joins without the
    separator.
    """

    def _assert_cross(self, list01, list02, sep, expected):
        # Helper: run cross_product and compare the full result sequence.
        list03 = nlpregex.regular_language.ast.cross_product( list01, list02, sep )
        self.assertEqual( list(list03), expected )

    def test_cross_product_0_0(self):
        self._assert_cross( [], [], '#', [] )

    def test_cross_product_0_1(self):
        self._assert_cross( [], ['a'], '#', ['a'] )

    def test_cross_product_0_2(self):
        self._assert_cross( [], ['a', 'b'], '#', ['a', 'b'] )

    def test_cross_product_0_5(self):
        self._assert_cross( [], ['a', 'b', 'c', 'd', 'e'], '#', ['a', 'b', 'c', 'd', 'e'] )

    def test_cross_product_1_0(self):
        self._assert_cross( ['a'], [], '#', ['a'] )

    def test_cross_product_2_0(self):
        self._assert_cross( ['a', 'b'], [], '#', ['a', 'b'] )

    def test_cross_product_5_0(self):
        self._assert_cross( ['a', 'b', 'c', 'd', 'e'], [], '#', ['a', 'b', 'c', 'd', 'e'] )

    def test_cross_product_1_1(self):
        self._assert_cross( [''], ['a'], '#', ['a'] )

    def test_cross_product_1_2(self):
        self._assert_cross( [''], ['a', 'b'], '#', ['a', 'b'] )

    def test_cross_product_1_5(self):
        self._assert_cross( [''], ['a', 'b', 'c', 'd', 'e'], '#', ['a', 'b', 'c', 'd', 'e'] )

    def test_cross_product_1_1_p2(self):
        self._assert_cross( ['a'], [''], '#', ['a'] )

    def test_cross_product_2_1(self):
        self._assert_cross( ['a', 'b'], [''], '#', ['a', 'b'] )

    def test_cross_product_5_1(self):
        self._assert_cross( ['a', 'b', 'c', 'd', 'e'], [''], '#', ['a', 'b', 'c', 'd', 'e'] )

    def test_cross_product_pattern_100(self):
        self._assert_cross( ['a', ''], ['x', 'y', 'z'], '#',
                            ['a#x', 'a#y', 'a#z', 'x', 'y', 'z'] )

    def test_cross_product_pattern_101(self):
        # BUG FIX: the last four assertions of this test used to re-check
        # elements 0-3 (a copy-paste slip); the '456' combinations at
        # indices 8-11 are now verified as well.
        self._assert_cross( ['ab', 'cd', '456'], ['mno', 'zyx', '', '123'], ' ',
                            ['ab mno', 'ab zyx', 'ab', 'ab 123',
                             'cd mno', 'cd zyx', 'cd', 'cd 123',
                             '456 mno', '456 zyx', '456', '456 123'] )
class test_create_sequence_subtree( unittest.TestCase ):
    """Unit tests for nlpregex.regular_language.ast.create_sequence_subtree()."""

    def test_create_sequence_subtree_0000(self):
        # Fixture: two 'union' nodes, each the parent of two 'terminal' nodes.
        ast01 = nlpregex.regular_language.ast.AST()
        node01 = nlpregex.regular_language.ast.ASTNode( 'union', 'node01' )
        node02 = nlpregex.regular_language.ast.ASTNode( 'union', 'node02' )
        node03 = nlpregex.regular_language.ast.ASTNode( 'terminal', 'node03' )
        node04 = nlpregex.regular_language.ast.ASTNode( 'terminal', 'node04' )
        node05 = nlpregex.regular_language.ast.ASTNode( 'terminal', 'node05' )
        node06 = nlpregex.regular_language.ast.ASTNode( 'terminal', 'node06' )
        node01.add_to_graph(ast01)
        node02.add_to_graph(ast01)
        node03.add_to_graph(ast01)
        node04.add_to_graph(ast01)
        node05.add_to_graph(ast01)
        node06.add_to_graph(ast01)
        edge01 = nlpregex.regular_language.ast.ASTEdge()
        edge02 = nlpregex.regular_language.ast.ASTEdge()
        edge03 = nlpregex.regular_language.ast.ASTEdge()
        edge04 = nlpregex.regular_language.ast.ASTEdge()
        edge01.add_to_graph( ast01, node01, node03, "directed" )
        edge02.add_to_graph( ast01, node01, node04, "directed" )
        edge03.add_to_graph( ast01, node02, node05, "directed" )
        edge04.add_to_graph( ast01, node02, node06, "directed" )
        # Exercise: wrap node01 and node02 under a new 'sequence' node.
        node07 = nlpregex.regular_language.ast.create_sequence_subtree( node01, node02, ast01 )
        # Verify the new node's type and default attribute values.
        self.assertEqual( node07.ast_node_type, 'sequence' )
        self.assertEqual( node07.content, '' )
        self.assertEqual( node07.repeat_min, 1 )
        self.assertEqual( node07.repeat_max, 1 )
        self.assertEqual( node07.phrase_count, 1 )
        self.assertEqual( node07.text_width, 0 )
        self.assertEqual( len(node07.out_token_pre), 0 )
        self.assertEqual( len(node07.out_token_post), 0 )
        # The sequence node is a new root with both subtrees as children.
        self.assertEqual( node07.out_degree(), 2 )
        self.assertEqual( node07.in_degree(), 0 )
        edge05 = node07.out_neighbors()[0]
        edge06 = node07.out_neighbors()[1]
        node08 = edge05.other_node_of(node07)
        node09 = edge06.other_node_of(node07)
        self.assertEqual( node01, node08 )
        self.assertEqual( node02, node09 )
class test_create_finite_repeat_subtree( unittest.TestCase ):
    """Unit tests for nlpregex.regular_language.ast.create_finite_repeat_subtree()."""

    def test_create_finite_repeat_subtree_0000(self):
        # Fixture: one 'union' node that is the parent of four terminals.
        ast01 = nlpregex.regular_language.ast.AST()
        node01 = nlpregex.regular_language.ast.ASTNode( 'union', 'node01' )
        node02 = nlpregex.regular_language.ast.ASTNode( 'terminal', 'node03' )
        node03 = nlpregex.regular_language.ast.ASTNode( 'terminal', 'node04' )
        node04 = nlpregex.regular_language.ast.ASTNode( 'terminal', 'node05' )
        node05 = nlpregex.regular_language.ast.ASTNode( 'terminal', 'node06' )
        node01.add_to_graph(ast01)
        node02.add_to_graph(ast01)
        node03.add_to_graph(ast01)
        node04.add_to_graph(ast01)
        node05.add_to_graph(ast01)
        edge01 = nlpregex.regular_language.ast.ASTEdge()
        edge02 = nlpregex.regular_language.ast.ASTEdge()
        edge03 = nlpregex.regular_language.ast.ASTEdge()
        edge04 = nlpregex.regular_language.ast.ASTEdge()
        edge01.add_to_graph( ast01, node01, node02, "directed" )
        edge02.add_to_graph( ast01, node01, node03, "directed" )
        edge03.add_to_graph( ast01, node01, node04, "directed" )
        edge04.add_to_graph( ast01, node01, node05, "directed" )
        # Exercise: wrap node01 in a finite-repeat node with min 2, max 3.
        node06 = nlpregex.regular_language.ast.create_finite_repeat_subtree( node01, 2, 3, ast01 )
        self.assertEqual( node06.ast_node_type, 'finite repeat' )
        self.assertEqual( node06.content, '' )
        # The requested repeat bounds are stored on the new node.
        self.assertEqual( node06.repeat_min, 2 )
        self.assertEqual( node06.repeat_max, 3 )
        self.assertEqual( node06.phrase_count, 1 )
        self.assertEqual( node06.text_width, 0 )
        self.assertEqual( len(node06.out_token_pre), 0 )
        self.assertEqual( len(node06.out_token_post), 0 )
        # The repeat node is a new root with the wrapped subtree as only child.
        self.assertEqual( node06.out_degree(), 1 )
        self.assertEqual( node06.in_degree(), 0 )
        edge05 = node06.out_neighbors()[0]
        node07 = edge05.other_node_of(node06)
        self.assertEqual( node01, node07 )
class test_create_infinite_repeat_subtree( unittest.TestCase ):
    """Unit tests for nlpregex.regular_language.ast.create_infinite_repeat_subtree()."""

    def test_create_infinite_repeat_subtree_0000(self):
        # Fixture: one 'union' node that is the parent of four terminals.
        ast01 = nlpregex.regular_language.ast.AST()
        node01 = nlpregex.regular_language.ast.ASTNode( 'union', 'node01' )
        node02 = nlpregex.regular_language.ast.ASTNode( 'terminal', 'node03' )
        node03 = nlpregex.regular_language.ast.ASTNode( 'terminal', 'node04' )
        node04 = nlpregex.regular_language.ast.ASTNode( 'terminal', 'node05' )
        node05 = nlpregex.regular_language.ast.ASTNode( 'terminal', 'node06' )
        node01.add_to_graph(ast01)
        node02.add_to_graph(ast01)
        node03.add_to_graph(ast01)
        node04.add_to_graph(ast01)
        node05.add_to_graph(ast01)
        edge01 = nlpregex.regular_language.ast.ASTEdge()
        edge02 = nlpregex.regular_language.ast.ASTEdge()
        edge03 = nlpregex.regular_language.ast.ASTEdge()
        edge04 = nlpregex.regular_language.ast.ASTEdge()
        edge01.add_to_graph( ast01, node01, node02, "directed" )
        edge02.add_to_graph( ast01, node01, node03, "directed" )
        edge03.add_to_graph( ast01, node01, node04, "directed" )
        edge04.add_to_graph( ast01, node01, node05, "directed" )
        # Exercise: 'PLUS' repetition — expects repeat bounds [1, unbounded).
        node06 = nlpregex.regular_language.ast.create_infinite_repeat_subtree( node01, 'PLUS', ast01 )
        self.assertEqual( node06.ast_node_type, 'infinite repeat' )
        self.assertEqual( node06.content, '' )
        # repeat_max of -1 encodes "unbounded".
        self.assertEqual( node06.repeat_min, 1 )
        self.assertEqual( node06.repeat_max, -1 )
        self.assertEqual( node06.phrase_count, 1 )
        self.assertEqual( node06.text_width, 0 )
        self.assertEqual( len(node06.out_token_pre), 0 )
        self.assertEqual( len(node06.out_token_post), 0 )
        # The repeat node is a new root with the wrapped subtree as only child.
        self.assertEqual( node06.out_degree(), 1 )
        self.assertEqual( node06.in_degree(), 0 )
        edge05 = node06.out_neighbors()[0]
        node07 = edge05.other_node_of(node06)
        self.assertEqual( node01, node07 )

    def test_create_infinite_repeat_subtree_0001(self):
        # Same fixture as above, but with 'STAR' repetition.
        ast01 = nlpregex.regular_language.ast.AST()
        node01 = nlpregex.regular_language.ast.ASTNode( 'union', 'node01' )
        node02 = nlpregex.regular_language.ast.ASTNode( 'terminal', 'node03' )
        node03 = nlpregex.regular_language.ast.ASTNode( 'terminal', 'node04' )
        node04 = nlpregex.regular_language.ast.ASTNode( 'terminal', 'node05' )
        node05 = nlpregex.regular_language.ast.ASTNode( 'terminal', 'node06' )
        node01.add_to_graph(ast01)
        node02.add_to_graph(ast01)
        node03.add_to_graph(ast01)
        node04.add_to_graph(ast01)
        node05.add_to_graph(ast01)
        edge01 = nlpregex.regular_language.ast.ASTEdge()
        edge02 = nlpregex.regular_language.ast.ASTEdge()
        edge03 = nlpregex.regular_language.ast.ASTEdge()
        edge04 = nlpregex.regular_language.ast.ASTEdge()
        edge01.add_to_graph( ast01, node01, node02, "directed" )
        edge02.add_to_graph( ast01, node01, node03, "directed" )
        edge03.add_to_graph( ast01, node01, node04, "directed" )
        edge04.add_to_graph( ast01, node01, node05, "directed" )
        # Exercise: 'STAR' repetition — expects repeat bounds [0, unbounded).
        node06 = nlpregex.regular_language.ast.create_infinite_repeat_subtree( node01, 'STAR', ast01 )
        self.assertEqual( node06.ast_node_type, 'infinite repeat' )
        self.assertEqual( node06.content, '' )
        self.assertEqual( node06.repeat_min, 0 )
        self.assertEqual( node06.repeat_max, -1 )
        self.assertEqual( node06.phrase_count, 1 )
        self.assertEqual( node06.text_width, 0 )
        self.assertEqual( len(node06.out_token_pre), 0 )
        self.assertEqual( len(node06.out_token_post), 0 )
        self.assertEqual( node06.out_degree(), 1 )
        self.assertEqual( node06.in_degree(), 0 )
        edge05 = node06.out_neighbors()[0]
        node07 = edge05.other_node_of(node06)
        self.assertEqual( node01, node07 )
if __name__ == '__main__':
    # Run every TestCase in this module when executed directly.
    unittest.main()
| 37.135338
| 103
| 0.59317
| 1,633
| 14,817
| 5.183099
| 0.067973
| 0.194943
| 0.165761
| 0.187382
| 0.891304
| 0.855624
| 0.836484
| 0.788752
| 0.788752
| 0.788752
| 0
| 0.090176
| 0.280016
| 14,817
| 398
| 104
| 37.228643
| 0.703225
| 0.004387
| 0
| 0.708029
| 0
| 0
| 0.04496
| 0
| 0
| 0
| 0
| 0
| 0.40146
| 1
| 0.069343
| false
| 0
| 0.007299
| 0
| 0.091241
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
21da617cfee1fe0b9c2d05f53a3e106b14989b63
| 1,569
|
py
|
Python
|
python_helpers/opcode_fx33.py
|
RandomBananazz/chip8mc
|
0e184c392a523c82dbc945325aa2cb9e5487e5e7
|
[
"MIT"
] | 3
|
2020-09-28T17:50:49.000Z
|
2020-12-30T18:23:46.000Z
|
python_helpers/opcode_fx33.py
|
RandomBananazz/chip8mc
|
0e184c392a523c82dbc945325aa2cb9e5487e5e7
|
[
"MIT"
] | null | null | null |
python_helpers/opcode_fx33.py
|
RandomBananazz/chip8mc
|
0e184c392a523c82dbc945325aa2cb9e5487e5e7
|
[
"MIT"
] | null | null | null |
# Generate one .mcfunction file per register V0-VF implementing the CHIP-8
# FX33 opcode: store the binary-coded-decimal digits of Vx at memory
# addresses I, I+1 and I+2 (hundreds, tens, ones).
for x in range(16):
    with open(f'..\\data\\cpu\\functions\\opcode_switch\\opcode_fx33\\opcode_fx33_{x}.mcfunction', 'w') as f:
        # The emitted commands split the register value into digits:
        #   memory_value /= c100          -> hundreds digit (written at I)
        #   copy_1 %= c100 then /= c10    -> tens digit     (written at I+1)
        #   copy_2 %= c10                 -> ones digit     (written at I+2)
        # and finally restore I by subtracting 2.
        f.write(f'scoreboard players operation Global memory_value = Global V{hex(x)[2:].upper()}\n'
                'scoreboard players operation Global copy_1 = Global memory_value\n'
                'scoreboard players operation Global copy_2 = Global memory_value\n'
                'scoreboard players operation Global memory_value /= c100 Constant\n'
                'scoreboard players operation Global copy_2 %= c10 Constant\n'
                'scoreboard players operation Global copy_1 %= c100 Constant\n'
                'scoreboard players operation Global copy_1 /= c10 Constant\n'
                'function cpu:memory/write_memory/write_memory\n'
                'scoreboard players add Global I 1\n'
                'scoreboard players operation Global memory_value = Global copy_1\n'
                'function cpu:memory/write_memory/write_memory\n'
                'scoreboard players add Global I 1\n'
                'scoreboard players operation Global memory_value = Global copy_2\n'
                'function cpu:memory/write_memory/write_memory\n'
                'scoreboard players remove Global I 2\n')
"""
for x in range(16):
with open('..\\data\\cpu\\functions\\opcode_switch\\opcode_fx33.mcfunction', 'a') as f:
f.write(f'execute if score Global PC_nibble_2 matches {x} run function cpu:opcode_switch/opcode_fx33/opcode_fx33_{x}\n')
"""
| 65.375
| 128
| 0.626514
| 200
| 1,569
| 4.76
| 0.23
| 0.214286
| 0.207983
| 0.302521
| 0.877101
| 0.856092
| 0.856092
| 0.576681
| 0.319328
| 0.319328
| 0
| 0.031746
| 0.277247
| 1,569
| 23
| 129
| 68.217391
| 0.80776
| 0
| 0
| 0.294118
| 0
| 0
| 0.698713
| 0.164269
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
21df4958a3160597cb376ceaa54815342cd23baf
| 54,429
|
py
|
Python
|
tests/test_plugins/test_views.py
|
elyase/polyaxon
|
1c19f059a010a6889e2b7ea340715b2bcfa382a0
|
[
"MIT"
] | null | null | null |
tests/test_plugins/test_views.py
|
elyase/polyaxon
|
1c19f059a010a6889e2b7ea340715b2bcfa382a0
|
[
"MIT"
] | null | null | null |
tests/test_plugins/test_views.py
|
elyase/polyaxon
|
1c19f059a010a6889e2b7ea340715b2bcfa382a0
|
[
"MIT"
] | null | null | null |
# pylint:disable=too-many-lines
from unittest.mock import patch
import mock
import pytest
from rest_framework import status
from api.plugins.serializers import ProjectTensorboardJobSerializer
from api.utils.views.protected import ProtectedView
from constants.jobs import JobLifeCycle
from constants.urls import API_V1
from db.models.experiment_groups import ExperimentGroup
from db.models.experiments import Experiment
from db.models.notebooks import NotebookJob, NotebookJobStatus
from db.models.projects import Project
from db.models.tensorboards import TensorboardJob, TensorboardJobStatus
from dockerizer.tasks import build_project_notebook
from factories.factory_experiment_groups import ExperimentGroupFactory
from factories.factory_experiments import ExperimentFactory
from factories.factory_plugins import NotebookJobFactory, TensorboardJobFactory
from factories.factory_projects import ProjectFactory
from factories.factory_repos import RepoFactory
from factories.fixtures import notebook_spec_parsed_content, tensorboard_spec_parsed_content
from scheduler import notebook_scheduler
from scheduler.spawners.notebook_spawner import NotebookSpawner
from scheduler.spawners.project_job_spawner import ProjectJobSpawner
from scheduler.spawners.templates.constants import JOB_NAME
from scheduler.spawners.tensorboard_spawner import TensorboardSpawner
from tests.utils import BaseViewTest
@pytest.mark.plugins_mark
class TestProjectTensorboardListViewV1(BaseViewTest):
    """Tests the project tensorboards list endpoint: retrieval, pagination,
    ordering (`?sort=`) and query filtering (`?query=`)."""
    serializer_class = ProjectTensorboardJobSerializer
    model_class = TensorboardJob
    factory_class = TensorboardJobFactory
    num_objects = 3
    HAS_AUTH = True
    DISABLE_RUNNER = True

    def setUp(self):
        super().setUp()
        self.project = ProjectFactory(user=self.auth_client.user)
        self.other_project = ProjectFactory()
        self.url = '/{}/{}/{}/tensorboards/'.format(API_V1,
                                                    self.project.user.username,
                                                    self.project.name)
        self.other_url = '/{}/{}/{}/tensorboards/'.format(API_V1,
                                                          self.other_project.user.username,
                                                          self.other_project.name)
        self.objects = [self.factory_class(project=self.project) for _ in range(self.num_objects)]
        # one object that does not belong to the filter
        self.factory_class(project=self.other_project)
        # Default list ordering is most-recently-updated first.
        self.queryset = self.model_class.objects.filter(project=self.project)
        self.queryset = self.queryset.order_by('-updated_at')
        self.other_object = self.factory_class(project=self.other_project)

    def test_get(self):
        """Listing returns only this project's jobs, serialized in order."""
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == len(self.objects)
        data = resp.data['results']
        assert len(data) == self.queryset.count()
        assert data == self.serializer_class(self.queryset, many=True).data
        # Test other
        resp = self.auth_client.get(self.other_url)
        assert resp.status_code == status.HTTP_200_OK
        jobs_count = self.queryset.all().count()
        assert jobs_count == self.num_objects
        # Getting all jobs
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['count'] == jobs_count

    def test_pagination(self):
        """`?limit=` paginates and the `next` link yields the remainder."""
        limit = self.num_objects - 1
        resp = self.auth_client.get("{}?limit={}".format(self.url, limit))
        assert resp.status_code == status.HTTP_200_OK
        next_page = resp.data.get('next')
        assert next_page is not None
        assert resp.data['count'] == self.queryset.count()
        data = resp.data['results']
        assert len(data) == limit
        assert data == self.serializer_class(self.queryset[:limit], many=True).data
        resp = self.auth_client.get(next_page)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        data = resp.data['results']
        assert len(data) == 1
        assert data == self.serializer_class(self.queryset[limit:], many=True).data

    def test_get_order(self):
        """`?sort=` overrides the default `-updated_at` ordering."""
        resp = self.auth_client.get(self.url + '?sort=created_at,updated_at')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == len(self.objects)
        data = resp.data['results']
        assert len(data) == self.queryset.count()
        assert data != self.serializer_class(self.queryset, many=True).data
        assert data == self.serializer_class(self.queryset.order_by('created_at', 'updated_at'),
                                             many=True).data

    def test_get_order_pagination(self):
        """Sorting and pagination compose."""
        queryset = self.queryset.order_by('created_at', 'updated_at')
        limit = self.num_objects - 1
        resp = self.auth_client.get("{}?limit={}&{}".format(self.url,
                                                            limit,
                                                            'sort=created_at,updated_at'))
        assert resp.status_code == status.HTTP_200_OK
        next_page = resp.data.get('next')
        assert next_page is not None
        assert resp.data['count'] == queryset.count()
        data = resp.data['results']
        assert len(data) == limit
        assert data == self.serializer_class(queryset[:limit], many=True).data
        resp = self.auth_client.get(next_page)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        data = resp.data['results']
        assert len(data) == 1
        assert data == self.serializer_class(queryset[limit:], many=True).data

    @pytest.mark.filterwarnings('ignore::RuntimeWarning')
    def test_get_filter(self):
        """`?query=` filters; malformed filters are a 400."""
        # Wrong filter format raises
        resp = self.auth_client.get(self.url + '?query=created_at<2010-01-01')
        assert resp.status_code == status.HTTP_400_BAD_REQUEST
        resp = self.auth_client.get(self.url + '?query=created_at:<2010-01-01')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == 0
        resp = self.auth_client.get(self.url +
                                    '?query=created_at:>=2010-01-01,status:Finished')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == 0
        resp = self.auth_client.get(self.url +
                                    '?query=created_at:>=2010-01-01,status:created|running')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == len(self.objects)
        data = resp.data['results']
        assert len(data) == self.queryset.count()
        assert data == self.serializer_class(self.queryset, many=True).data

    def test_get_filter_pagination(self):
        """Filtering and pagination compose."""
        limit = self.num_objects - 1
        # Bug fix: the filter fragment must not start with '?'. The url already
        # carries '?limit=...', so the filter is joined with '&'; the previous
        # '?query=...' fragment made the param key '?query' and the filter was
        # silently ignored.
        resp = self.auth_client.get("{}?limit={}&{}".format(
            self.url,
            limit,
            'query=created_at:>=2010-01-01,status:created|running'))
        assert resp.status_code == status.HTTP_200_OK
        next_page = resp.data.get('next')
        assert next_page is not None
        assert resp.data['count'] == self.queryset.count()
        data = resp.data['results']
        assert len(data) == limit
        assert data == self.serializer_class(self.queryset[:limit], many=True).data
        resp = self.auth_client.get(next_page)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        data = resp.data['results']
        assert len(data) == 1
        assert data == self.serializer_class(self.queryset[limit:], many=True).data
@pytest.mark.plugins_mark
class TestStartProjectTensorboardViewV1(BaseViewTest):
    """Tests the project-level tensorboard start endpoint.

    The async scheduler task is patched throughout, so assertions cover only
    scheduling calls, response codes and DB state.
    """
    model_class = Project
    factory_class = ProjectFactory
    HAS_AUTH = True

    def setUp(self):
        super().setUp()
        self.object = self.factory_class(user=self.auth_client.user)
        self.url = '/{}/{}/{}/tensorboard/start'.format(
            API_V1,
            self.object.user.username,
            self.object.name)
        self.queryset = self.model_class.objects.all()

    def test_start(self):
        """Starting creates a TensorboardJob and schedules the start task once."""
        assert self.queryset.count() == 1
        assert self.object.tensorboard is None
        with patch('scheduler.tasks.tensorboards.tensorboards_start.apply_async') as mock_fct:
            resp = self.auth_client.post(self.url)
        assert mock_fct.call_count == 1
        assert resp.status_code == status.HTTP_201_CREATED
        assert self.queryset.count() == 1
        # `tensorboard` is cached on the instance; drop the cache before re-reading.
        self.object.clear_cached_properties()
        assert isinstance(self.object.tensorboard, TensorboardJob)

    def test_spawner_start(self):
        """With the scheduler's start_tensorboard patched, the view still returns 201."""
        assert self.queryset.count() == 1
        with patch('scheduler.tensorboard_scheduler.start_tensorboard') as mock_fct:
            resp = self.auth_client.post(self.url)
        assert mock_fct.call_count == 1
        assert resp.status_code == status.HTTP_201_CREATED
        assert self.queryset.count() == 1

    def test_start_with_updated_config(self):
        """Restart reuses the previous config unless a new one is posted;
        an already-building job short-circuits with 200 and schedules nothing."""
        with patch('scheduler.tasks.tensorboards.tensorboards_start.apply_async') as mock_fct:
            resp = self.auth_client.post(self.url)
        assert mock_fct.call_count == 1
        assert resp.status_code == status.HTTP_201_CREATED
        # Start with default config
        self.object.clear_cached_properties()
        config = self.object.tensorboard.config
        # Simulate stopping the tensorboard
        self.object.tensorboard.delete()
        # Starting the tensorboard without config should pass
        with patch('scheduler.tasks.tensorboards.tensorboards_start.apply_async') as mock_fct:
            resp = self.auth_client.post(self.url)
        assert mock_fct.call_count == 1
        assert resp.status_code == status.HTTP_201_CREATED
        # Check that it is still using the same config
        self.object.clear_cached_properties()
        assert config == self.object.tensorboard.config
        # Simulate stopping the tensorboard
        self.object.tensorboard.delete()
        self.object.save()
        # Starting again the tensorboard with a different config
        with patch('scheduler.tasks.tensorboards.tensorboards_start.apply_async') as mock_fct:
            resp = self.auth_client.post(
                self.url,
                data={'config': tensorboard_spec_parsed_content.parsed_data})
        assert mock_fct.call_count == 1
        assert resp.status_code == status.HTTP_201_CREATED
        self.object.clear_cached_properties()
        # Check that the config was updated
        assert config != self.object.tensorboard.config
        # Trying to start an already running job returns 200
        # and does not schedule the start task again
        self.object.tensorboard.set_status(status=JobLifeCycle.BUILDING)
        with patch('scheduler.tasks.tensorboards.tensorboards_start.apply_async') as mock_fct:
            resp = self.auth_client.post(
                self.url,
                data={'config': tensorboard_spec_parsed_content.parsed_data})
        assert mock_fct.call_count == 0
        assert resp.status_code == status.HTTP_200_OK

    def test_start_during_build_process(self):
        """A second start request while the job is BUILDING schedules nothing."""
        with patch('scheduler.tasks.tensorboards.tensorboards_start.apply_async') as start_mock:
            self.auth_client.post(self.url)
        self.object.refresh_from_db()
        assert start_mock.call_count == 1
        assert self.object.tensorboard.last_status == JobLifeCycle.CREATED
        # Check that user cannot start a new job if it's already building
        self.object.tensorboard.set_status(status=JobLifeCycle.BUILDING)
        with patch('scheduler.tasks.tensorboards.tensorboards_start.apply_async') as start_mock:
            self.auth_client.post(self.url)
        assert start_mock.call_count == 0

    def test_starting_stopping_tensorboard_creating_new_one_create_new_job(self):
        """Start -> stop -> start produces a second TensorboardJob row."""
        with patch('scheduler.tasks.tensorboards.tensorboards_start.apply_async') as start_mock:
            self.auth_client.post(self.url)
        self.object.refresh_from_db()
        assert start_mock.call_count == 1
        assert self.object.tensorboard.last_status == JobLifeCycle.CREATED
        self.object.tensorboard.set_status(status=JobLifeCycle.STOPPED)
        assert TensorboardJob.objects.count() == 1
        assert TensorboardJobStatus.objects.count() == 2
        with patch('scheduler.tasks.tensorboards.tensorboards_start.apply_async') as start_mock:
            self.auth_client.post(self.url)
        # A new job was created; drop the cached `tensorboard` property first.
        self.object.clear_cached_properties()
        assert start_mock.call_count == 1
        assert self.object.tensorboard.last_status == JobLifeCycle.CREATED
        assert TensorboardJob.objects.count() == 2
        assert TensorboardJobStatus.objects.count() == 3
@pytest.mark.plugins_mark
class TestStartExperimentTensorboardViewV1(BaseViewTest):
    """Tests the experiment-level tensorboard start endpoint.

    Mirrors the project-level tests; additionally asserts the tensorboard is
    attached to the experiment only, never to the owning project.
    """
    model_class = Experiment
    factory_class = ExperimentFactory
    HAS_AUTH = True

    def setUp(self):
        super().setUp()
        self.project = ProjectFactory(user=self.auth_client.user)
        self.object = self.factory_class(user=self.auth_client.user, project=self.project)
        self.url = '/{}/{}/{}/experiments/{}/tensorboard/start'.format(
            API_V1,
            self.project.user.username,
            self.project.name,
            self.object.id)
        self.queryset = self.model_class.objects

    def test_start(self):
        """Starting creates a TensorboardJob on the experiment, not the project."""
        assert self.queryset.count() == 1
        assert self.object.tensorboard is None
        with patch('scheduler.tasks.tensorboards.tensorboards_start.apply_async') as mock_fct:
            resp = self.auth_client.post(self.url)
        assert mock_fct.call_count == 1
        assert resp.status_code == status.HTTP_201_CREATED
        assert self.queryset.count() == 1
        # `tensorboard` is cached on the instance; drop the cache before re-reading.
        self.object.clear_cached_properties()
        assert isinstance(self.object.tensorboard, TensorboardJob)
        assert self.project.tensorboard is None

    def test_spawner_start(self):
        """With the scheduler's start_tensorboard patched, the view still returns 201."""
        assert self.queryset.count() == 1
        with patch('scheduler.tensorboard_scheduler.start_tensorboard') as mock_fct:
            resp = self.auth_client.post(self.url)
        assert mock_fct.call_count == 1
        assert resp.status_code == status.HTTP_201_CREATED
        assert self.queryset.count() == 1
        assert self.project.tensorboard is None

    def test_start_with_updated_config(self):
        """Restart reuses the previous config unless a new one is posted;
        an already-building job short-circuits with 200 and schedules nothing."""
        with patch('scheduler.tasks.tensorboards.tensorboards_start.apply_async') as mock_fct:
            resp = self.auth_client.post(self.url)
        assert mock_fct.call_count == 1
        assert resp.status_code == status.HTTP_201_CREATED
        # Start with default config
        self.object.refresh_from_db()
        config = self.object.tensorboard.config
        # Simulate stopping the tensorboard
        self.object.tensorboard.delete()
        # Starting the tensorboard without config should pass
        with patch('scheduler.tasks.tensorboards.tensorboards_start.apply_async') as mock_fct:
            resp = self.auth_client.post(self.url)
        assert mock_fct.call_count == 1
        assert resp.status_code == status.HTTP_201_CREATED
        # Check that it is still using the same config
        self.object.clear_cached_properties()
        assert config == self.object.tensorboard.config
        # Simulate stopping the tensorboard
        self.object.tensorboard.delete()
        self.object.save()
        # Starting again the tensorboard with a different config
        with patch('scheduler.tasks.tensorboards.tensorboards_start.apply_async') as mock_fct:
            resp = self.auth_client.post(
                self.url,
                data={'config': tensorboard_spec_parsed_content.parsed_data})
        assert mock_fct.call_count == 1
        assert resp.status_code == status.HTTP_201_CREATED
        self.object.clear_cached_properties()
        # Check that the config was updated
        assert config != self.object.tensorboard.config
        # Trying to start an already running job returns 200
        # and does not schedule the start task again
        self.object.tensorboard.set_status(status=JobLifeCycle.BUILDING)
        with patch('scheduler.tasks.tensorboards.tensorboards_start.apply_async') as mock_fct:
            resp = self.auth_client.post(
                self.url,
                data={'config': tensorboard_spec_parsed_content.parsed_data})
        assert mock_fct.call_count == 0
        assert resp.status_code == status.HTTP_200_OK
        assert self.project.tensorboard is None

    def test_start_during_build_process(self):
        """A second start request while the job is BUILDING schedules nothing."""
        with patch('scheduler.tasks.tensorboards.tensorboards_start.apply_async') as start_mock:
            self.auth_client.post(self.url)
        self.object.refresh_from_db()
        assert start_mock.call_count == 1
        assert self.object.tensorboard.last_status == JobLifeCycle.CREATED
        # Check that user cannot start a new job if it's already building
        self.object.tensorboard.set_status(status=JobLifeCycle.BUILDING)
        with patch('scheduler.tasks.tensorboards.tensorboards_start.apply_async') as start_mock:
            self.auth_client.post(self.url)
        assert start_mock.call_count == 0

    def test_starting_stopping_tensorboard_creating_new_one_create_new_job(self):
        """Start -> stop -> start produces a second TensorboardJob row."""
        with patch('scheduler.tasks.tensorboards.tensorboards_start.apply_async') as start_mock:
            self.auth_client.post(self.url)
        self.object.refresh_from_db()
        assert start_mock.call_count == 1
        assert self.object.tensorboard.last_status == JobLifeCycle.CREATED
        self.object.tensorboard.set_status(status=JobLifeCycle.STOPPED)
        assert TensorboardJob.objects.count() == 1
        assert TensorboardJobStatus.objects.count() == 2
        with patch('scheduler.tasks.tensorboards.tensorboards_start.apply_async') as start_mock:
            self.auth_client.post(self.url)
        # A new job was created; drop the cached `tensorboard` property first.
        self.object.clear_cached_properties()
        assert start_mock.call_count == 1
        assert self.object.tensorboard.last_status == JobLifeCycle.CREATED
        assert TensorboardJob.objects.count() == 2
        assert TensorboardJobStatus.objects.count() == 3
        assert self.project.tensorboard is None
@pytest.mark.plugins_mark
class TestStartExperimentGroupTensorboardViewV1(BaseViewTest):
    """Tests the experiment-group-level tensorboard start endpoint.

    Mirrors the project-level tests; additionally asserts the tensorboard is
    attached to the group only, never to the owning project.
    """
    model_class = ExperimentGroup
    factory_class = ExperimentGroupFactory
    HAS_AUTH = True

    def setUp(self):
        super().setUp()
        self.project = ProjectFactory(user=self.auth_client.user)
        # Patch group creation so the factory does not fan out experiments.
        with patch('scheduler.tasks.experiment_groups.'
                   'experiments_group_create.apply_async') as _:  # noqa
            self.object = self.factory_class(user=self.auth_client.user, project=self.project)
        self.url = '/{}/{}/{}/groups/{}/tensorboard/start'.format(
            API_V1,
            self.project.user.username,
            self.project.name,
            self.object.id)
        self.queryset = self.model_class.objects

    def test_start(self):
        """Starting creates a TensorboardJob on the group, not the project."""
        assert self.queryset.count() == 1
        assert self.object.tensorboard is None
        with patch('scheduler.tasks.tensorboards.tensorboards_start.apply_async') as mock_fct:
            resp = self.auth_client.post(self.url)
        assert mock_fct.call_count == 1
        assert resp.status_code == status.HTTP_201_CREATED
        assert self.queryset.count() == 1
        # `tensorboard` is cached on the instance; drop the cache before re-reading.
        self.object.clear_cached_properties()
        assert isinstance(self.object.tensorboard, TensorboardJob)
        assert self.project.tensorboard is None

    def test_spawner_start(self):
        """With the scheduler's start_tensorboard patched, the view still returns 201."""
        assert self.queryset.count() == 1
        with patch('scheduler.tensorboard_scheduler.start_tensorboard') as mock_fct:
            resp = self.auth_client.post(self.url)
        assert mock_fct.call_count == 1
        assert resp.status_code == status.HTTP_201_CREATED
        assert self.queryset.count() == 1
        assert self.project.tensorboard is None

    def test_start_with_updated_config(self):
        """Restart reuses the previous config unless a new one is posted;
        an already-building job short-circuits with 200 and schedules nothing."""
        with patch('scheduler.tasks.tensorboards.tensorboards_start.apply_async') as mock_fct:
            resp = self.auth_client.post(self.url)
        assert mock_fct.call_count == 1
        assert resp.status_code == status.HTTP_201_CREATED
        # Start with default config
        self.object.clear_cached_properties()
        config = self.object.tensorboard.config
        # Simulate stopping the tensorboard
        self.object.tensorboard.delete()
        # Starting the tensorboard without config should pass
        with patch('scheduler.tasks.tensorboards.tensorboards_start.apply_async') as mock_fct:
            resp = self.auth_client.post(self.url)
        assert mock_fct.call_count == 1
        assert resp.status_code == status.HTTP_201_CREATED
        # Check that it is still using the same config
        self.object.clear_cached_properties()
        assert config == self.object.tensorboard.config
        # Simulate stopping the tensorboard
        self.object.tensorboard.delete()
        self.object.save()
        # Starting again the tensorboard with a different config
        with patch('scheduler.tasks.tensorboards.tensorboards_start.apply_async') as mock_fct:
            resp = self.auth_client.post(
                self.url,
                data={'config': tensorboard_spec_parsed_content.parsed_data})
        assert mock_fct.call_count == 1
        assert resp.status_code == status.HTTP_201_CREATED
        self.object.clear_cached_properties()
        # Check that the config was updated
        assert config != self.object.tensorboard.config
        # Trying to start an already running job returns 200
        # and does not schedule the start task again
        self.object.tensorboard.set_status(status=JobLifeCycle.BUILDING)
        with patch('scheduler.tasks.tensorboards.tensorboards_start.apply_async') as mock_fct:
            resp = self.auth_client.post(
                self.url,
                data={'config': tensorboard_spec_parsed_content.parsed_data})
        assert mock_fct.call_count == 0
        assert resp.status_code == status.HTTP_200_OK
        assert self.project.tensorboard is None

    def test_start_during_build_process(self):
        """A second start request while the job is BUILDING schedules nothing."""
        with patch('scheduler.tasks.tensorboards.tensorboards_start.apply_async') as start_mock:
            self.auth_client.post(self.url)
        self.object.refresh_from_db()
        assert start_mock.call_count == 1
        assert self.object.tensorboard.last_status == JobLifeCycle.CREATED
        # Check that user cannot start a new job if it's already building
        self.object.tensorboard.set_status(status=JobLifeCycle.BUILDING)
        with patch('scheduler.tasks.tensorboards.tensorboards_start.apply_async') as start_mock:
            self.auth_client.post(self.url)
        assert start_mock.call_count == 0

    def test_starting_stopping_tensorboard_creating_new_one_create_new_job(self):
        """Start -> stop -> start produces a second TensorboardJob row."""
        with patch('scheduler.tasks.tensorboards.tensorboards_start.apply_async') as start_mock:
            self.auth_client.post(self.url)
        self.object.refresh_from_db()
        assert start_mock.call_count == 1
        assert self.object.tensorboard.last_status == JobLifeCycle.CREATED
        self.object.tensorboard.set_status(status=JobLifeCycle.STOPPED)
        assert TensorboardJob.objects.count() == 1
        assert TensorboardJobStatus.objects.count() == 2
        with patch('scheduler.tasks.tensorboards.tensorboards_start.apply_async') as start_mock:
            self.auth_client.post(self.url)
        # A new job was created; drop the cached `tensorboard` property first.
        self.object.clear_cached_properties()
        assert start_mock.call_count == 1
        assert self.object.tensorboard.last_status == JobLifeCycle.CREATED
        assert TensorboardJob.objects.count() == 2
        assert TensorboardJobStatus.objects.count() == 3
        assert self.project.tensorboard is None
@pytest.mark.plugins_mark
class TestStopProjectTensorboardViewV1(BaseViewTest):
    """Exercises the project-level tensorboard stop endpoint."""
    model_class = Project
    factory_class = ProjectFactory
    HAS_AUTH = True

    def setUp(self):
        super().setUp()
        self.object = self.factory_class(user=self.auth_client.user)
        running_job = TensorboardJobFactory(project=self.object)
        running_job.set_status(status=JobLifeCycle.RUNNING)
        self.url = '/{}/{}/{}/tensorboard/stop'.format(
            API_V1,
            self.object.user.username,
            self.object.name)
        self.queryset = TensorboardJob.objects.all()

    def test_stop(self):
        """Posting to the stop url schedules the async stop task exactly once."""
        payload = {}
        assert self.queryset.count() == 1
        with patch('scheduler.tasks.tensorboards.tensorboards_stop.apply_async') as stop_task:
            response = self.auth_client.post(self.url, payload)
        assert stop_task.call_count == 1
        assert response.status_code == status.HTTP_200_OK
        # Stopping does not delete the job record.
        assert self.queryset.count() == 1

    def test_spawner_stop(self):
        """Posting to the stop url reaches the scheduler's stop_tensorboard."""
        payload = {}
        assert self.queryset.count() == 1
        with patch('scheduler.tensorboard_scheduler.stop_tensorboard') as stop_fct:
            response = self.auth_client.post(self.url, payload)
        assert stop_fct.call_count == 1
        assert response.status_code == status.HTTP_200_OK
        assert self.queryset.count() == 1
@pytest.mark.plugins_mark
class TestStopExperimentTensorboardViewV1(BaseViewTest):
    """Exercises the experiment-level tensorboard stop endpoint."""
    model_class = Experiment
    factory_class = ExperimentFactory
    HAS_AUTH = True

    def setUp(self):
        super().setUp()
        self.project = ProjectFactory(user=self.auth_client.user)
        self.object = self.factory_class(user=self.auth_client.user, project=self.project)
        running_job = TensorboardJobFactory(project=self.project, experiment=self.object)
        running_job.set_status(status=JobLifeCycle.RUNNING)
        self.url = '/{}/{}/{}/experiments/{}/tensorboard/stop'.format(
            API_V1,
            self.project.user.username,
            self.project.name,
            self.object.id)
        self.queryset = TensorboardJob.objects.all()

    def test_stop(self):
        """Posting to the stop url schedules the async stop task exactly once."""
        payload = {}
        assert self.queryset.count() == 1
        with patch('scheduler.tasks.tensorboards.tensorboards_stop.apply_async') as stop_task:
            response = self.auth_client.post(self.url, payload)
        assert stop_task.call_count == 1
        assert response.status_code == status.HTTP_200_OK
        # Stopping does not delete the job record.
        assert self.queryset.count() == 1

    def test_spawner_stop(self):
        """Posting to the stop url reaches the scheduler's stop_tensorboard."""
        payload = {}
        assert self.queryset.count() == 1
        with patch('scheduler.tensorboard_scheduler.stop_tensorboard') as stop_fct:
            response = self.auth_client.post(self.url, payload)
        assert stop_fct.call_count == 1
        assert response.status_code == status.HTTP_200_OK
        assert self.queryset.count() == 1
@pytest.mark.plugins_mark
class TestStopExperimentGroupTensorboardViewV1(BaseViewTest):
    """Exercises the experiment-group-level tensorboard stop endpoint."""
    model_class = ExperimentGroup
    factory_class = ExperimentGroupFactory
    HAS_AUTH = True

    def setUp(self):
        super().setUp()
        self.project = ProjectFactory(user=self.auth_client.user)
        # Patch group creation so the factory does not fan out experiments.
        with patch('scheduler.tasks.experiment_groups.'
                   'experiments_group_create.apply_async') as _:  # noqa
            self.object = self.factory_class(user=self.auth_client.user, project=self.project)
        running_job = TensorboardJobFactory(project=self.project, experiment_group=self.object)
        running_job.set_status(status=JobLifeCycle.RUNNING)
        self.url = '/{}/{}/{}/groups/{}/tensorboard/stop'.format(
            API_V1,
            self.project.user.username,
            self.project.name,
            self.object.id)
        self.queryset = TensorboardJob.objects.all()

    def test_stop(self):
        """Posting to the stop url schedules the async stop task exactly once."""
        payload = {}
        assert self.queryset.count() == 1
        with patch('scheduler.tasks.tensorboards.tensorboards_stop.apply_async') as stop_task:
            response = self.auth_client.post(self.url, payload)
        assert stop_task.call_count == 1
        assert response.status_code == status.HTTP_200_OK
        # Stopping does not delete the job record.
        assert self.queryset.count() == 1

    def test_spawner_stop(self):
        """Posting to the stop url reaches the scheduler's stop_tensorboard."""
        payload = {}
        assert self.queryset.count() == 1
        with patch('scheduler.tensorboard_scheduler.stop_tensorboard') as stop_fct:
            response = self.auth_client.post(self.url, payload)
        assert stop_fct.call_count == 1
        assert response.status_code == status.HTTP_200_OK
        assert self.queryset.count() == 1
@pytest.mark.plugins_mark
class TestStartNotebookViewV1(BaseViewTest):
    """Tests the project notebook start endpoint.

    Unlike tensorboards, notebooks require a config on first start (400
    otherwise) and go through an image build task before running.
    """
    model_class = Project
    factory_class = ProjectFactory
    HAS_AUTH = True

    def setUp(self):
        super().setUp()
        self.object = self.factory_class(user=self.auth_client.user)
        self.url = '/{}/{}/{}/notebook/start'.format(
            API_V1,
            self.object.user.username,
            self.object.name)
        self.queryset = self.model_class.objects.all()

    def test_post_without_config_fails(self):
        """A first start with no config is rejected with 400."""
        resp = self.auth_client.post(self.url)
        assert resp.status_code == status.HTTP_400_BAD_REQUEST

    def test_build(self):
        """Starting with a config creates a NotebookJob and schedules the build."""
        data = {'config': notebook_spec_parsed_content.parsed_data}
        assert self.queryset.count() == 1
        assert self.object.notebook is None
        with patch('scheduler.tasks.notebooks.projects_notebook_build.apply_async') as mock_fct:
            resp = self.auth_client.post(self.url, data)
        assert mock_fct.call_count == 1
        assert resp.status_code == status.HTTP_201_CREATED
        assert self.queryset.count() == 1
        # `notebook` is cached on the instance; drop the cache before re-reading.
        self.object.clear_cached_properties()
        assert isinstance(self.object.notebook, NotebookJob)

    def test_start(self):
        """Running the build task after start reaches the dockerizer builder."""
        data = {'config': notebook_spec_parsed_content.parsed_data}
        assert self.queryset.count() == 1
        with patch('scheduler.tasks.notebooks.'
                   'projects_notebook_build.apply_async') as build_mock_fct:
            resp = self.auth_client.post(self.url, data)
        assert build_mock_fct.call_count == 1
        # Simulate build
        with patch('dockerizer.builders.notebooks.build_notebook_job') as mock_fct:
            build_project_notebook(project_id=self.object.id)
        assert mock_fct.call_count == 1
        assert resp.status_code == status.HTTP_201_CREATED
        assert self.queryset.count() == 1

    def test_build_with_updated_config(self):
        """Restart without config is a 400 (unlike tensorboards); a new config
        replaces the old one; an already-building job short-circuits with 200."""
        data = {'config': notebook_spec_parsed_content.parsed_data}
        with patch('scheduler.tasks.notebooks.projects_notebook_build.apply_async') as mock_fct:
            resp = self.auth_client.post(self.url, data)
        assert mock_fct.call_count == 1
        assert resp.status_code == status.HTTP_201_CREATED
        # Start with default config
        self.object.refresh_from_db()
        config = self.object.notebook.config
        # Simulate stopping the notebook
        self.object.notebook.delete()
        # Starting the notebook without config should not pass
        with patch('scheduler.tasks.notebooks.projects_notebook_build.apply_async') as mock_fct:
            resp = self.auth_client.post(self.url)
        assert mock_fct.call_count == 0
        assert resp.status_code == status.HTTP_400_BAD_REQUEST
        # Check that no new notebook was created
        self.object.clear_cached_properties()
        assert self.object.notebook is None
        # Starting again the notebook with a different config
        data['config']['build']['image'] = 'image_v2'
        with patch('scheduler.tasks.notebooks.projects_notebook_build.apply_async') as _:  # noqa
            self.auth_client.post(self.url, data)
        self.object.clear_cached_properties()
        # Check that the image was updated
        assert config != self.object.notebook.config
        # Trying to start an already running job returns 200
        # and does not schedule the build task again
        self.object.notebook.set_status(status=JobLifeCycle.BUILDING)
        with patch('scheduler.tasks.notebooks.projects_notebook_build.apply_async') as mock_fct:
            resp = self.auth_client.post(self.url, data=data)
        assert mock_fct.call_count == 0
        assert resp.status_code == status.HTTP_200_OK

    def test_start_during_build_process(self):
        """A second start request while the job is BUILDING schedules nothing."""
        data = {'config': notebook_spec_parsed_content.parsed_data}
        with patch('scheduler.tasks.notebooks.projects_notebook_build.apply_async') as start_mock:
            resp = self.auth_client.post(self.url, data=data)
        assert resp.status_code == status.HTTP_201_CREATED
        self.object.refresh_from_db()
        assert start_mock.call_count == 1
        assert self.object.notebook.last_status == JobLifeCycle.CREATED
        # Check that user cannot start a new job if it's already building
        self.object.notebook.set_status(status=JobLifeCycle.BUILDING)
        with patch('scheduler.tasks.notebooks.projects_notebook_build.apply_async') as start_mock:
            resp = self.auth_client.post(self.url)
        assert resp.status_code == status.HTTP_200_OK
        assert start_mock.call_count == 0

    def test_starting_stopping_notebook_creating_new_one_create_new_job(self):
        """Start -> stop -> start produces a second NotebookJob row."""
        data = {'config': notebook_spec_parsed_content.parsed_data}
        with patch('scheduler.tasks.notebooks.projects_notebook_build.apply_async') as start_mock:
            self.auth_client.post(self.url, data=data)
        self.object.refresh_from_db()
        assert start_mock.call_count == 1
        assert self.object.notebook.last_status == JobLifeCycle.CREATED
        self.object.notebook.set_status(status=JobLifeCycle.STOPPED)
        assert NotebookJob.objects.count() == 1
        assert NotebookJobStatus.objects.count() == 2
        with patch('scheduler.tasks.notebooks.projects_notebook_build.apply_async') as start_mock:
            self.auth_client.post(self.url, data=data)
        self.object.refresh_from_db()
        assert start_mock.call_count == 1
        # A new job was created; drop the cached `notebook` property first.
        self.object.clear_cached_properties()
        assert self.object.notebook.last_status == JobLifeCycle.CREATED
        assert NotebookJob.objects.count() == 2
        assert NotebookJobStatus.objects.count() == 3
@pytest.mark.plugins_mark
class TestStopNotebookViewV1(BaseViewTest):
    """Exercises the project notebook stop endpoint, including the git
    commit/undo behavior controlled by the `commit` flag."""
    model_class = Project
    factory_class = ProjectFactory
    HAS_AUTH = True

    def setUp(self):
        super().setUp()
        self.object = self.factory_class(user=self.auth_client.user)
        notebook = NotebookJobFactory(project=self.object)
        notebook.set_status(status=JobLifeCycle.RUNNING)
        RepoFactory(project=self.object)
        self.url = '/{}/{}/{}/notebook/stop'.format(
            API_V1,
            self.object.user.username,
            self.object.name)
        self.queryset = self.model_class.objects.all()

    def test_stop(self):
        """Default stop commits the repo changes and schedules the stop task."""
        payload = {}
        assert self.queryset.count() == 1
        with patch('scheduler.tasks.notebooks.projects_notebook_stop.apply_async') as stop_task, \
                patch('libs.repos.git.commit') as git_commit, \
                patch('libs.repos.git.undo') as git_undo:
            response = self.auth_client.post(self.url, payload)
        assert stop_task.call_count == 1
        assert git_commit.call_count == 1
        assert git_undo.call_count == 0
        assert response.status_code == status.HTTP_200_OK
        assert self.queryset.count() == 1

    def test_stop_without_committing(self):
        """`commit: False` undoes repo changes instead of committing them."""
        payload = {'commit': False}
        assert self.queryset.count() == 1
        with patch('scheduler.tasks.notebooks.projects_notebook_stop.apply_async') as stop_task, \
                patch('libs.repos.git.commit') as git_commit, \
                patch('libs.repos.git.undo') as git_undo:
            response = self.auth_client.post(self.url, payload)
        assert stop_task.call_count == 1
        assert git_commit.call_count == 0
        assert git_undo.call_count == 1
        assert response.status_code == status.HTTP_200_OK
        assert self.queryset.count() == 1

    def test_spawner_stop(self):
        """Posting to the stop url reaches the scheduler's stop_notebook."""
        payload = {}
        assert self.queryset.count() == 1
        with patch('scheduler.notebook_scheduler.stop_notebook') as stop_fct:
            response = self.auth_client.post(self.url, payload)
        assert stop_fct.call_count == 1
        assert response.status_code == status.HTTP_200_OK
        assert self.queryset.count() == 1
@pytest.mark.plugins_mark
class BaseTestPluginViewV1(BaseViewTest):
    """Shared url helpers and access-control tests for plugin proxy views.

    Subclasses set `plugin_app` to the spawner's job name (tensorboard/notebook).
    """
    plugin_app = ''

    @classmethod
    def _get_url(cls, project, path=None):
        """Build the plugin view url for `project`, optionally with a sub-path."""
        base = '/{}/{}/{}'.format(cls.plugin_app,
                                  project.user.username,
                                  project.name)
        return '{}/{}'.format(base, path) if path else base

    @classmethod
    def _get_service_url(cls, deployment_name):
        """Return the in-cluster proxy url the spawner exposes for `deployment_name`."""
        return ProjectJobSpawner._get_proxy_url(  # pylint:disable=protected-access
            namespace='polyaxon',
            job_name=cls.plugin_app,
            deployment_name=deployment_name,
            port=12503)

    def test_rejects_anonymous_user_and_redirected_to_login_page(self):
        anon_project = ProjectFactory()
        response = self.client.get(self._get_url(anon_project))
        # Anonymous access redirects to login.
        assert response.status_code == 302

    def test_rejects_user_with_no_privileges(self):
        private_project = ProjectFactory(is_public=False)
        response = self.auth_client.get(self._get_url(private_project))
        assert response.status_code in (status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN)

    def test_project_with_no_job(self):
        own_project = ProjectFactory(user=self.auth_client.user)
        response = self.auth_client.get(self._get_url(own_project))
        assert response.status_code == status.HTTP_404_NOT_FOUND
@pytest.mark.plugins_mark
class TestTensorboardProjectViewV1(BaseTestPluginViewV1):
    """Tests for the project-level tensorboard proxy view."""
    plugin_app = TensorboardSpawner.TENSORBOARD_JOB_NAME

    def test_project_requests_tensorboard_url(self):
        # A running tensorboard job makes the view ask the scheduler for its url.
        project = ProjectFactory(user=self.auth_client.user)
        tensorboard = TensorboardJobFactory(project=project)
        tensorboard.set_status(status=JobLifeCycle.RUNNING)
        with patch('scheduler.tensorboard_scheduler.get_tensorboard_url') as mock_fct:
            response = self.auth_client.get(self._get_url(project))
        assert mock_fct.call_count == 1
        assert response.status_code == 200

    @mock.patch('scheduler.tensorboard_scheduler.TensorboardSpawner')
    def test_redirects_to_proxy_protected_url(self, spawner_mock):
        project = ProjectFactory(user=self.auth_client.user)
        tensorboard = TensorboardJobFactory(project=project)
        tensorboard.set_status(status=JobLifeCycle.RUNNING)
        deployment_name = JOB_NAME.format(
            job_uuid=tensorboard.uuid.hex, name=self.plugin_app)
        service_url = self._get_service_url(deployment_name=deployment_name)
        # Make the spawner report the service url so the view redirects to it.
        mock_instance = spawner_mock.return_value
        mock_instance.get_tensorboard_url.return_value = service_url
        response = self.auth_client.get(self._get_url(project))
        assert response.status_code == 200
        # The view answers with an internal-redirect header that nginx follows.
        self.assertTrue(ProtectedView.NGINX_REDIRECT_HEADER in response)
        proxy_url = '{}/'.format(service_url)
        self.assertEqual(response[ProtectedView.NGINX_REDIRECT_HEADER], proxy_url)

    @mock.patch('scheduler.tensorboard_scheduler.TensorboardSpawner')
    def test_redirects_to_proxy_protected_url_with_extra_path(self, spawner_mock):
        project = ProjectFactory(user=self.auth_client.user)
        tensorboard = TensorboardJobFactory(project=project)
        tensorboard.set_status(status=JobLifeCycle.RUNNING)
        deployment_name = JOB_NAME.format(
            job_uuid=tensorboard.uuid.hex, name=self.plugin_app)
        service_url = self._get_service_url(deployment_name=deployment_name)
        mock_instance = spawner_mock.return_value
        mock_instance.get_tensorboard_url.return_value = service_url
        # To `tree?` — a trailing `?` is normalized to a `/` in the proxy url.
        response = self.auth_client.get(self._get_url(project, 'tree?'))
        assert response.status_code == 200
        self.assertTrue(ProtectedView.NGINX_REDIRECT_HEADER in response)
        proxy_url = '{}/{}'.format(
            service_url,
            'tree/'
        )
        self.assertEqual(response[ProtectedView.NGINX_REDIRECT_HEADER], proxy_url)
        # To static files — query strings are forwarded unchanged.
        response = self.auth_client.get(
            self._get_url(project, 'static/components/something?v=4.7.0'))
        assert response.status_code == 200
        self.assertTrue(ProtectedView.NGINX_REDIRECT_HEADER in response)
        proxy_url = '{}/{}'.format(
            service_url,
            'static/components/something?v=4.7.0'
        )
        self.assertEqual(response[ProtectedView.NGINX_REDIRECT_HEADER], proxy_url)
@pytest.mark.plugins_mark
class TestTensorboardExperimentViewV1(BaseTestPluginViewV1):
    """Tests for the experiment-level tensorboard proxy view."""
    plugin_app = TensorboardSpawner.TENSORBOARD_JOB_NAME

    @classmethod
    def _get_url(cls, project, experiment, path=None):  # noqa
        # Experiment-scoped variant of the base url builder.
        url = '/{}/{}/{}/experiments/{}'.format(
            cls.plugin_app,
            project.user.username,
            project.name,
            experiment.id)
        if path:
            url = '{}/{}'.format(url, path)
        return url

    def test_rejects_anonymous_user_and_redirected_to_login_page(self):
        project = ProjectFactory()
        experiment = ExperimentFactory(project=project)
        response = self.client.get(self._get_url(project, experiment))
        assert response.status_code == 302

    def test_rejects_user_with_no_privileges(self):
        project = ProjectFactory(is_public=False)
        experiment = ExperimentFactory(project=project)
        response = self.auth_client.get(self._get_url(project, experiment))
        assert response.status_code in (status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN)

    def test_project_with_no_job(self):
        project = ProjectFactory(user=self.auth_client.user)
        experiment = ExperimentFactory(project=project)
        response = self.auth_client.get(self._get_url(project, experiment))
        assert response.status_code == status.HTTP_404_NOT_FOUND

    def test_project_requests_tensorboard_url(self):
        # A running experiment-bound tensorboard triggers a scheduler url lookup.
        project = ProjectFactory(user=self.auth_client.user)
        experiment = ExperimentFactory(project=project)
        tensorboard = TensorboardJobFactory(project=project, experiment=experiment)
        tensorboard.set_status(status=JobLifeCycle.RUNNING)
        with patch('scheduler.tensorboard_scheduler.get_tensorboard_url') as mock_fct:
            response = self.auth_client.get(self._get_url(project, experiment))
        assert mock_fct.call_count == 1
        assert response.status_code == 200

    @mock.patch('scheduler.tensorboard_scheduler.TensorboardSpawner')
    def test_redirects_to_proxy_protected_url(self, spawner_mock):
        project = ProjectFactory(user=self.auth_client.user)
        experiment = ExperimentFactory(project=project)
        tensorboard = TensorboardJobFactory(project=project, experiment=experiment)
        tensorboard.set_status(status=JobLifeCycle.RUNNING)
        deployment_name = JOB_NAME.format(
            job_uuid=tensorboard.uuid.hex, name=self.plugin_app)
        service_url = self._get_service_url(deployment_name=deployment_name)
        mock_instance = spawner_mock.return_value
        mock_instance.get_tensorboard_url.return_value = service_url
        response = self.auth_client.get(self._get_url(project, experiment))
        assert response.status_code == 200
        # The view answers with an internal-redirect header that nginx follows.
        self.assertTrue(ProtectedView.NGINX_REDIRECT_HEADER in response)
        proxy_url = '{}/'.format(service_url)
        self.assertEqual(response[ProtectedView.NGINX_REDIRECT_HEADER], proxy_url)

    @mock.patch('scheduler.tensorboard_scheduler.TensorboardSpawner')
    def test_redirects_to_proxy_protected_url_with_extra_path(self, spawner_mock):
        project = ProjectFactory(user=self.auth_client.user)
        experiment = ExperimentFactory(project=project)
        tensorboard = TensorboardJobFactory(project=project, experiment=experiment)
        tensorboard.set_status(status=JobLifeCycle.RUNNING)
        deployment_name = JOB_NAME.format(
            job_uuid=tensorboard.uuid.hex, name=self.plugin_app)
        service_url = self._get_service_url(deployment_name=deployment_name)
        mock_instance = spawner_mock.return_value
        mock_instance.get_tensorboard_url.return_value = service_url
        # To `tree?` — a trailing `?` is normalized to a `/` in the proxy url.
        response = self.auth_client.get(self._get_url(project, experiment, 'tree?'))
        assert response.status_code == 200
        self.assertTrue(ProtectedView.NGINX_REDIRECT_HEADER in response)
        proxy_url = '{}/{}'.format(
            service_url,
            'tree/'
        )
        self.assertEqual(response[ProtectedView.NGINX_REDIRECT_HEADER], proxy_url)
        # To static files — query strings are forwarded unchanged.
        response = self.auth_client.get(
            self._get_url(project, experiment, 'static/components/something?v=4.7.0'))
        assert response.status_code == 200
        self.assertTrue(ProtectedView.NGINX_REDIRECT_HEADER in response)
        proxy_url = '{}/{}'.format(
            service_url,
            'static/components/something?v=4.7.0'
        )
        self.assertEqual(response[ProtectedView.NGINX_REDIRECT_HEADER], proxy_url)
@pytest.mark.plugins_mark
class TestTensorboardExperimentGroupViewV1(BaseTestPluginViewV1):
    """Tests for the experiment-group-level tensorboard proxy view."""
    plugin_app = TensorboardSpawner.TENSORBOARD_JOB_NAME

    @classmethod
    def _get_url(cls, project, group, path=None):  # pylint:disable=arguments-differ
        # Group-scoped variant of the base url builder.
        url = '/{}/{}/{}/groups/{}'.format(
            cls.plugin_app,
            project.user.username,
            project.name,
            group.id)
        if path:
            url = '{}/{}'.format(url, path)
        return url

    def test_rejects_anonymous_user_and_redirected_to_login_page(self):
        project = ProjectFactory()
        # Patch the group-creation task so the factory does not schedule experiments.
        with patch('scheduler.tasks.experiment_groups.'
                   'experiments_group_create.apply_async') as _:  # noqa
            group = ExperimentGroupFactory(project=project)
        response = self.client.get(self._get_url(project, group))
        assert response.status_code == 302

    def test_rejects_user_with_no_privileges(self):
        project = ProjectFactory(is_public=False)
        with patch('scheduler.tasks.experiment_groups.'
                   'experiments_group_create.apply_async') as _:  # noqa
            group = ExperimentGroupFactory(project=project)
        response = self.auth_client.get(self._get_url(project, group))
        assert response.status_code in (status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN)

    def test_project_with_no_job(self):
        project = ProjectFactory(user=self.auth_client.user)
        with patch('scheduler.tasks.experiment_groups.'
                   'experiments_group_create.apply_async') as _:  # noqa
            group = ExperimentGroupFactory(project=project)
        response = self.auth_client.get(self._get_url(project, group))
        assert response.status_code == status.HTTP_404_NOT_FOUND

    def test_project_requests_tensorboard_url(self):
        # A running group-bound tensorboard triggers a scheduler url lookup.
        project = ProjectFactory(user=self.auth_client.user)
        with patch('scheduler.tasks.experiment_groups.'
                   'experiments_group_create.apply_async') as _:  # noqa
            group = ExperimentGroupFactory(project=project)
        tensorboard = TensorboardJobFactory(project=project, experiment_group=group)
        tensorboard.set_status(status=JobLifeCycle.RUNNING)
        with patch('scheduler.tensorboard_scheduler.get_tensorboard_url') as mock_fct:
            response = self.auth_client.get(self._get_url(project, group))
        assert mock_fct.call_count == 1
        assert response.status_code == 200

    @mock.patch('scheduler.tensorboard_scheduler.TensorboardSpawner')
    def test_redirects_to_proxy_protected_url(self, spawner_mock):
        project = ProjectFactory(user=self.auth_client.user)
        with patch('scheduler.tasks.experiment_groups.'
                   'experiments_group_create.apply_async') as _:  # noqa
            group = ExperimentGroupFactory(project=project)
        tensorboard = TensorboardJobFactory(project=project, experiment_group=group)
        tensorboard.set_status(status=JobLifeCycle.RUNNING)
        deployment_name = JOB_NAME.format(
            job_uuid=tensorboard.uuid.hex, name=self.plugin_app)
        service_url = self._get_service_url(deployment_name=deployment_name)
        mock_instance = spawner_mock.return_value
        mock_instance.get_tensorboard_url.return_value = service_url
        response = self.auth_client.get(self._get_url(project, group))
        assert response.status_code == 200
        # The view answers with an internal-redirect header that nginx follows.
        self.assertTrue(ProtectedView.NGINX_REDIRECT_HEADER in response)
        proxy_url = '{}/'.format(service_url)
        self.assertEqual(response[ProtectedView.NGINX_REDIRECT_HEADER], proxy_url)

    @mock.patch('scheduler.tensorboard_scheduler.TensorboardSpawner')
    def test_redirects_to_proxy_protected_url_with_extra_path(self, spawner_mock):
        project = ProjectFactory(user=self.auth_client.user)
        with patch('scheduler.tasks.experiment_groups.'
                   'experiments_group_create.apply_async') as _:  # noqa
            group = ExperimentGroupFactory(project=project)
        tensorboard = TensorboardJobFactory(project=project, experiment_group=group)
        tensorboard.set_status(status=JobLifeCycle.RUNNING)
        deployment_name = JOB_NAME.format(
            job_uuid=tensorboard.uuid.hex, name=self.plugin_app)
        service_url = self._get_service_url(deployment_name=deployment_name)
        mock_instance = spawner_mock.return_value
        mock_instance.get_tensorboard_url.return_value = service_url
        # To `tree?` — a trailing `?` is normalized to a `/` in the proxy url.
        response = self.auth_client.get(self._get_url(project, group, 'tree?'))
        assert response.status_code == 200
        self.assertTrue(ProtectedView.NGINX_REDIRECT_HEADER in response)
        proxy_url = '{}/{}'.format(
            service_url,
            'tree/'
        )
        self.assertEqual(response[ProtectedView.NGINX_REDIRECT_HEADER], proxy_url)
        # To static files — query strings are forwarded unchanged.
        response = self.auth_client.get(
            self._get_url(project, group, 'static/components/something?v=4.7.0'))
        assert response.status_code == 200
        self.assertTrue(ProtectedView.NGINX_REDIRECT_HEADER in response)
        proxy_url = '{}/{}'.format(
            service_url,
            'static/components/something?v=4.7.0'
        )
        self.assertEqual(response[ProtectedView.NGINX_REDIRECT_HEADER], proxy_url)
@pytest.mark.plugins_mark
class TestNotebookViewV1(BaseTestPluginViewV1):
    """Tests for the project notebook proxy view."""
    plugin_app = NotebookSpawner.NOTEBOOK_JOB_NAME

    def test_project_requests_notebook_url(self):
        # The view asks the scheduler for both the notebook url and its auth token.
        project = ProjectFactory(user=self.auth_client.user)
        notebook = NotebookJobFactory(project=project)
        notebook.set_status(status=JobLifeCycle.RUNNING)
        with patch('scheduler.notebook_scheduler.get_notebook_url') as mock_url_fct:
            with patch('scheduler.notebook_scheduler.get_notebook_token') as mock_token_fct:
                response = self.auth_client.get(self._get_url(project))
        assert mock_url_fct.call_count == 1
        assert mock_token_fct.call_count == 1
        assert response.status_code == 200

    @mock.patch('scheduler.notebook_scheduler.NotebookSpawner')
    def test_redirects_to_proxy_protected_url(self, spawner_mock):
        project = ProjectFactory(user=self.auth_client.user)
        notebook = NotebookJobFactory(project=project)
        notebook.set_status(status=JobLifeCycle.RUNNING)
        deployment_name = JOB_NAME.format(
            job_uuid=notebook.uuid.hex, name=self.plugin_app)
        service_url = self._get_service_url(deployment_name=deployment_name)
        mock_instance = spawner_mock.return_value
        mock_instance.get_notebook_url.return_value = service_url
        response = self.auth_client.get(self._get_url(project))
        assert response.status_code == 200
        # The view answers with an internal-redirect header that nginx follows;
        # the notebook token is appended as a query parameter.
        self.assertTrue(ProtectedView.NGINX_REDIRECT_HEADER in response)
        proxy_url = '{}/{}?token={}'.format(
            service_url,
            'tree',
            notebook_scheduler.get_notebook_token(notebook)
        )
        self.assertEqual(response[ProtectedView.NGINX_REDIRECT_HEADER], proxy_url)

    @mock.patch('scheduler.notebook_scheduler.NotebookSpawner')
    def test_redirects_to_proxy_protected_url_with_extra_path(self, spawner_mock):
        project = ProjectFactory(user=self.auth_client.user)
        notebook = NotebookJobFactory(project=project)
        notebook.set_status(status=JobLifeCycle.RUNNING)
        deployment_name = JOB_NAME.format(
            job_uuid=notebook.uuid.hex, name=self.plugin_app)
        service_url = self._get_service_url(deployment_name=deployment_name)
        mock_instance = spawner_mock.return_value
        mock_instance.get_notebook_url.return_value = service_url
        # To `tree?` — token starts the query string (`?token=`).
        response = self.auth_client.get(self._get_url(project, 'tree?'))
        assert response.status_code == 200
        self.assertTrue(ProtectedView.NGINX_REDIRECT_HEADER in response)
        proxy_url = '{}/{}?token={}'.format(
            service_url,
            'tree',
            notebook_scheduler.get_notebook_token(notebook)
        )
        self.assertEqual(response[ProtectedView.NGINX_REDIRECT_HEADER], proxy_url)
        # To static files — token is appended to the existing query string (`&token=`).
        response = self.auth_client.get(
            self._get_url(project, 'static/components/something?v=4.7.0'))
        assert response.status_code == 200
        self.assertTrue(ProtectedView.NGINX_REDIRECT_HEADER in response)
        proxy_url = '{}/{}&token={}'.format(
            service_url,
            'static/components/something?v=4.7.0',
            notebook_scheduler.get_notebook_token(notebook)
        )
        self.assertEqual(response[ProtectedView.NGINX_REDIRECT_HEADER], proxy_url)
# Prevent this base class from running tests: deleting the name stops the test
# collector from discovering it directly; the subclasses above keep their
# inherited copies of its test methods.
del BaseTestPluginViewV1
| 44.143552
| 98
| 0.685315
| 6,469
| 54,429
| 5.529448
| 0.043438
| 0.025496
| 0.044618
| 0.029075
| 0.907241
| 0.902376
| 0.892452
| 0.880682
| 0.872239
| 0.861308
| 0
| 0.010851
| 0.22115
| 54,429
| 1,232
| 99
| 44.179383
| 0.83294
| 0.038197
| 0
| 0.831643
| 0
| 0
| 0.10623
| 0.095691
| 0
| 0
| 0
| 0
| 0.286004
| 1
| 0.070994
| false
| 0
| 0.026369
| 0.001014
| 0.151116
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
df3b05e0b0ede771d66644c8a382ef9b8b114587
| 27,935
|
py
|
Python
|
getml/datasets/samples_generator.py
|
srnnkls/getml-python-api
|
032b2fec19a0e0a519eab480ee61e0d422d63993
|
[
"MIT"
] | null | null | null |
getml/datasets/samples_generator.py
|
srnnkls/getml-python-api
|
032b2fec19a0e0a519eab480ee61e0d422d63993
|
[
"MIT"
] | null | null | null |
getml/datasets/samples_generator.py
|
srnnkls/getml-python-api
|
032b2fec19a0e0a519eab480ee61e0d422d63993
|
[
"MIT"
] | null | null | null |
"""
Generate samples of artificial data sets.
"""
import numpy as np
import pandas as pd
import getml.aggregations as aggregations
# -----------------------------------------------------------------------------
def _aggregate(table, aggregation, col, join_key):
    """Aggregate *col* of *table* grouped by *join_key*.

    Args:
        table (pandas.DataFrame): Table containing at least *col* and *join_key*.
        aggregation (str): One of the constants in :mod:`getml.aggregations`.
        col (str): Name of the column to aggregate.
        join_key (str): Name of the grouping column.

    Returns:
        pandas.DataFrame: One row per *join_key* value with the aggregated *col*.

    Raises:
        ValueError: If *aggregation* is not a known aggregation.
    """
    # Aggregations that map 1:1 onto a pandas GroupBy method.
    # The aggregations.* constants are plain strings (the error message below
    # concatenates them), so they are safe dict keys.
    _SIMPLE = {
        aggregations.Avg: "mean",
        aggregations.Count: "count",
        aggregations.Max: "max",
        aggregations.Median: "median",
        aggregations.Min: "min",
        aggregations.Stddev: "std",
        aggregations.Sum: "sum",
        aggregations.Var: "var",
    }

    grouped = table[[col, join_key]].groupby([join_key], as_index=False)

    if aggregation in _SIMPLE:
        return getattr(grouped, _SIMPLE[aggregation])()

    if aggregation in (aggregations.CountDistinct,
                       aggregations.CountMinusCountDistinct):
        # Both need the distinct count; start from a plain count and either
        # overwrite it (CountDistinct) or subtract from it.
        series = grouped[col].nunique()
        output = table[[col, join_key]].groupby(
            [join_key],
            as_index=False
        ).count()
        if aggregation == aggregations.CountDistinct:
            output[col] = series
        else:
            output[col] -= series
        return output

    # ValueError is more specific than the former bare Exception and remains
    # catchable by any caller that caught Exception.
    raise ValueError("Aggregation '" + aggregation + "' not known!")
# -----------------------------------------------------------------------------
def make_categorical(n_rows_population=500,
                     n_rows_peripheral=125000,
                     random_state=None,
                     aggregation=aggregations.Count):
    """Generate a random dataset with categorical variables

    The dataset consists of a population table and one peripheral table.

    The peripheral table has 3 columns:

    * `column_01`: random categorical variable between '0' and '9'
    * `join_key`: random integer in the range from 0 to ``n_rows_population``
    * `time_stamp`: random number between 0 and 1

    The population table has 4 columns:

    * `column_01`: random categorical variable between '0' and '9'
    * `join_key`: unique integer in the range from 0 to ``n_rows_population``
    * `time_stamp`: random number between 0 and 1
    * `targets`: target variable. Defined as the number of matching entries in
      the peripheral table for which ``time_stamp_peripheral <
      time_stamp_population`` and the category in the peripheral table is not
      1, 2 or 9

    .. code-block:: sql

        SELECT aggregation( column_01 )
        FROM POPULATION_TABLE t1
        LEFT JOIN PERIPHERAL_TABLE t2
        ON t1.join_key = t2.join_key
        WHERE (
           ( t2.column_01 != '1' AND t2.column_01 != '2' AND t2.column_01 != '9' )
        ) AND t2.time_stamps <= t1.time_stamps
        GROUP BY t1.join_key,
                 t1.time_stamp;

    Args:
        n_rows_population (int, optional): Number of rows in the population table
        n_rows_peripheral (int, optional): Number of rows in the peripheral table
        random_state (int, optional): Determines random number generation for
            dataset creation. Pass an int for reproducible output across
            multiple function calls.
        aggregation (string, optional): Aggregation from
            :mod:`getml.aggregations` used to compute the targets.

    Returns:
        :class:`pandas.DataFrame`: Population table
        :class:`pandas.DataFrame`: Peripheral table
    """
    random = np.random.RandomState(random_state)

    population_table = pd.DataFrame()
    # ``np.str`` was a deprecated alias of the builtin ``str`` (removed in
    # NumPy 1.24); use the builtin directly.
    population_table["column_01"] = random.randint(0, 10, n_rows_population).astype(str)
    population_table["join_key"] = np.arange(n_rows_population)
    population_table["time_stamp_population"] = random.rand(n_rows_population)

    peripheral_table = pd.DataFrame()
    peripheral_table["column_01"] = random.randint(0, 10, n_rows_peripheral).astype(str)
    peripheral_table["join_key"] = random.randint(0, n_rows_population, n_rows_peripheral)
    peripheral_table["time_stamp_peripheral"] = random.rand(n_rows_peripheral)

    # Compute targets
    temp = peripheral_table.merge(
        population_table[["join_key", "time_stamp_population"]],
        how="left",
        on="join_key"
    )

    # Apply some conditions
    temp = temp[
        (temp["time_stamp_peripheral"] <= temp["time_stamp_population"]) &
        (temp["column_01"] != "1") &
        (temp["column_01"] != "2") &
        (temp["column_01"] != "9")
    ]

    # Define the aggregation
    temp = _aggregate(temp, aggregation, "column_01", "join_key")
    temp = temp.rename(index=str, columns={"column_01": "targets"})

    population_table = population_table.merge(
        temp,
        how="left",
        on="join_key"
    )
    del temp

    population_table = population_table.rename(
        index=str, columns={"time_stamp_population": "time_stamp"})
    peripheral_table = peripheral_table.rename(
        index=str, columns={"time_stamp_peripheral": "time_stamp"})

    # Replace NaN targets with 0.0 - target values may never be NaN!
    population_table.targets = np.where(
        np.isnan(population_table['targets']),
        0,
        population_table['targets'])

    return population_table, peripheral_table
# -----------------------------------------------------------------------------
def make_discrete(n_rows_population=500,
                  n_rows_peripheral=125000,
                  random_state=None,
                  aggregation=aggregations.Count):
    """Generate a random dataset with discrete variables

    The dataset consists of a population table and one peripheral table.

    The peripheral table has 3 columns:

    * `column_01`: random integer between -10 and 10
    * `join_key`: random integer in the range from 0 to ``n_rows_population``
    * `time_stamp`: random number between 0 and 1

    The population table has 4 columns:

    * `column_01`: random categorical variable between '0' and '9'
    * `join_key`: unique integer in the range from 0 to ``n_rows_population``
    * `time_stamp`: random number between 0 and 1
    * `targets`: target variable. Defined as the aggregation over the positive
      peripheral values for which ``time_stamp_peripheral <
      time_stamp_population`` and the join key matches

    .. code-block:: sql

        SELECT aggregation( column_01 )
        FROM POPULATION t1
        LEFT JOIN PERIPHERAL t2
        ON t1.join_key = t2.join_key
        WHERE (
           ( t2.column_01 > 0 )
        ) AND t2.time_stamp <= t1.time_stamp
        GROUP BY t1.join_key,
                 t1.time_stamp;

    Args:
        n_rows_population (int, optional): Number of rows in the population table
        n_rows_peripheral (int, optional): Number of rows in the peripheral table
        random_state (int, optional): Determines random number generation for
            dataset creation. Pass an int for reproducible output across
            multiple function calls.
        aggregation (string, optional): Aggregation from
            :mod:`getml.aggregations` used to compute the targets.

    Returns:
        :class:`pandas.DataFrame`: Population table
        :class:`pandas.DataFrame`: Peripheral table
    """
    random = np.random.RandomState(random_state)

    population_table = pd.DataFrame()
    # NOTE(review): this generates categorical strings '0'-'9' (copied from
    # make_categorical) while the original docstring promised a number in
    # [-1, 1] — generation kept as-is to preserve behavior; confirm intent.
    # ``np.str`` (removed in NumPy 1.24) replaced with the builtin ``str``.
    population_table["column_01"] = random.randint(0, 10, n_rows_population).astype(str)
    population_table["join_key"] = np.arange(n_rows_population)
    population_table["time_stamp_population"] = random.rand(n_rows_population)

    peripheral_table = pd.DataFrame()
    peripheral_table["column_01"] = random.randint(-11, 11, n_rows_peripheral)
    peripheral_table["join_key"] = random.randint(0, n_rows_population, n_rows_peripheral)
    peripheral_table["time_stamp_peripheral"] = random.rand(n_rows_peripheral)

    # Compute targets
    temp = peripheral_table.merge(
        population_table[["join_key", "time_stamp_population"]],
        how="left",
        on="join_key"
    )

    # Apply some conditions
    temp = temp[
        (temp["time_stamp_peripheral"] <= temp["time_stamp_population"]) &
        (temp["column_01"] > 0.0)
    ]

    # Define the aggregation
    temp = _aggregate(temp, aggregation, "column_01", "join_key")
    temp = temp.rename(index=str, columns={"column_01": "targets"})

    population_table = population_table.merge(
        temp,
        how="left",
        on="join_key"
    )
    del temp

    population_table = population_table.rename(
        index=str, columns={"time_stamp_population": "time_stamp"})
    peripheral_table = peripheral_table.rename(
        index=str, columns={"time_stamp_peripheral": "time_stamp"})

    # Replace NaN targets with 0.0 - target values may never be NaN!
    population_table.targets = np.where(
        np.isnan(population_table['targets']),
        0,
        population_table['targets'])

    return population_table, peripheral_table
# -----------------------------------------------------------------------------
def make_numerical(n_rows_population=500,
                   n_rows_peripheral=125000,
                   random_state=None,
                   aggregation=aggregations.Count):
    """Generate a random dataset with continous numerical variables

    The dataset consists of a population table and one peripheral table.

    The peripheral table has 3 columns: `column_01` (random number between -1
    and 1), `join_key` (random integer in the range from 0 to
    ``n_rows_population``) and `time_stamp` (random number between 0 and 1).

    The population table has the same three columns (with unique join keys)
    plus `targets`: the aggregation over the matching peripheral rows for
    which ``time_stamp_periperhal < time_stamp_population <
    time_stamp_peripheral + 0.5``

    .. code-block:: sql

        SELECT aggregation( column_01 )
        FROM POPULATION t1
        LEFT JOIN PERIPHERAL t2
        ON t1.join_key = t2.join_key
        WHERE (
           ( t1.time_stamp - t2.time_stamp <= 0.5 )
        ) AND t2.time_stamp <= t1.time_stamp
        GROUP BY t1.join_key,
                 t1.time_stamp;

    Args:
        n_rows_population (int, optional): Number of rows in the population table
        n_rows_peripheral (int, optional): Number of rows in the peripheral table
        random_state (int, optional): Determines random number generation for
            dataset creation. Pass an int for reproducible output across
            multiple function calls.
        aggregation (string, optional): Aggregation used to compute the targets.

    Returns:
        :class:`pandas.DataFrame`: Population table
        :class:`pandas.DataFrame`: Peripheral table
    """
    rng = np.random.RandomState(random_state)

    # Dict values are evaluated in source order, so the RandomState draws
    # happen in the same sequence as column-by-column assignment would.
    population_table = pd.DataFrame({
        "column_01": rng.rand(n_rows_population) * 2.0 - 1.0,
        "join_key": np.arange(n_rows_population),
        "time_stamp_population": rng.rand(n_rows_population),
    })

    peripheral_table = pd.DataFrame({
        "column_01": rng.rand(n_rows_peripheral) * 2.0 - 1.0,
        "join_key": rng.randint(0, n_rows_population, n_rows_peripheral),
        "time_stamp_peripheral": rng.rand(n_rows_peripheral),
    })

    # Targets: aggregate the peripheral rows whose time stamp falls in the
    # half-open 0.5-wide window before the population time stamp.
    merged = peripheral_table.merge(
        population_table[["join_key", "time_stamp_population"]],
        how="left",
        on="join_key")

    in_window = (
        (merged["time_stamp_peripheral"] <= merged["time_stamp_population"]) &
        (merged["time_stamp_peripheral"] >= merged["time_stamp_population"] - 0.5))

    aggregated = _aggregate(merged[in_window], aggregation, "column_01", "join_key")
    aggregated = aggregated.rename(index=str, columns={"column_01": "targets"})

    population_table = population_table.merge(aggregated, how="left", on="join_key")

    population_table = population_table.rename(
        index=str, columns={"time_stamp_population": "time_stamp"})
    peripheral_table = peripheral_table.rename(
        index=str, columns={"time_stamp_peripheral": "time_stamp"})

    # Target values may never be NaN; join keys without matches become 0.
    population_table["targets"] = np.where(
        np.isnan(population_table["targets"]),
        0,
        population_table["targets"])

    return population_table, peripheral_table
# -----------------------------------------------------------------------------
def make_same_units_categorical(n_rows_population=500,
                                n_rows_peripheral=125000,
                                random_state=None,
                                aggregation=aggregations.Count):
    """Generate a random dataset with categorical variables of the same unit

    The dataset consists of a population table and one peripheral table.
    The target is the aggregation over `column_02` of the peripheral rows
    whose `column_01` category matches the population row's:

    .. code-block:: sql

        SELECT aggregation( column_02 )
        FROM POPULATION_TABLE t1
        LEFT JOIN PERIPHERAL_TABLE t2
        ON t1.join_key = t2.join_key
        WHERE (
           ( t1.column_01 == t2.column_01 )
        ) AND t2.time_stamps <= t1.time_stamps
        GROUP BY t1.join_key,
                 t1.time_stamp;

    Args:
        n_rows_population (int, optional): Number of rows in the population table
        n_rows_peripheral (int, optional): Number of rows in the peripheral table
        random_state (int, optional): Determines random number generation for
            dataset creation. Pass an int for reproducible output across
            multiple function calls.
        aggregation (string, optional): Aggregation from
            :mod:`getml.aggregations` used to compute the targets.

    Returns:
        :class:`pandas.DataFrame`: Population table
        :class:`pandas.DataFrame`: Peripheral table
    """
    # Bug fix: random_state was documented but ignored (np.random was used
    # directly). Route all draws through a seeded RandomState as the sibling
    # generators do.
    random = np.random.RandomState(random_state)

    population_table = pd.DataFrame()
    # np.int / np.str were deprecated NumPy aliases (removed in 1.24);
    # use the builtins.
    population_table["column_01_population"] = (
        random.rand(n_rows_population) * 10.0).astype(int).astype(str)
    population_table["join_key"] = np.arange(n_rows_population)
    population_table["time_stamp_population"] = random.rand(n_rows_population)

    peripheral_table = pd.DataFrame()
    peripheral_table["column_01_peripheral"] = (
        random.rand(n_rows_peripheral) * 10.0).astype(int).astype(str)
    peripheral_table["column_02"] = random.rand(n_rows_peripheral) * 2.0 - 1.0
    # Vectorized draw replaces the former per-row rand(1) loop; the
    # distribution (uniform ints in [0, n_rows_population)) is unchanged.
    peripheral_table["join_key"] = random.randint(
        0, n_rows_population, n_rows_peripheral)
    peripheral_table["time_stamp_peripheral"] = random.rand(n_rows_peripheral)

    # ----------------

    temp = peripheral_table.merge(
        population_table[["join_key", "time_stamp_population",
                          "column_01_population"]],
        how="left",
        on="join_key"
    )

    # Apply some conditions
    temp = temp[
        (temp["time_stamp_peripheral"] <= temp["time_stamp_population"]) &
        (temp["column_01_peripheral"] == temp["column_01_population"])
    ]

    # Define the aggregation
    temp = _aggregate(temp, aggregation, "column_02", "join_key")
    temp = temp.rename(index=str, columns={"column_02": "targets"})

    population_table = population_table.merge(
        temp,
        how="left",
        on="join_key"
    )
    del temp

    population_table = population_table.rename(
        index=str,
        columns={"column_01_population": "column_01"}
    )
    peripheral_table = peripheral_table.rename(
        index=str,
        columns={"column_01_peripheral": "column_01"}
    )

    # ----------------

    population_table = population_table.rename(
        index=str, columns={"time_stamp_population": "time_stamp"})
    peripheral_table = peripheral_table.rename(
        index=str, columns={"time_stamp_peripheral": "time_stamp"})

    # ----------------

    # Replace NaN targets with 0.0 - target values may never be NaN!
    population_table["targets"] = np.where(
        np.isnan(population_table["targets"]),
        0,
        population_table["targets"])

    # ----------------

    return population_table, peripheral_table
# -----------------------------------------------------------------------------
def make_same_units_numerical(n_rows_population=500,
                              n_rows_peripheral=125000,
                              random_state=None,
                              aggregation=aggregations.Count):
    """Generate a random dataset with continous numerical variables of the same unit

    The dataset consists of a population table and one peripheral table.
    The target is the aggregation over the peripheral rows whose `column_01`
    lies within 0.5 below the population row's `column_01`:

    .. code-block:: sql

        SELECT aggregation( column_01 )
        FROM POPULATION t1
        LEFT JOIN PERIPHERAL t2
        ON t1.join_key = t2.join_key
        WHERE (
           ( t1.column_01 - t2.column_01 <= 0.5 )
        ) AND t2.time_stamp <= t1.time_stamp
        GROUP BY t1.join_key,
                 t1.time_stamp;

    Args:
        n_rows_population (int, optional): Number of rows in the population table
        n_rows_peripheral (int, optional): Number of rows in the peripheral table
        random_state (int, optional): Determines random number generation for
            dataset creation. Pass an int for reproducible output across
            multiple function calls.
        aggregation (string, optional): Aggregation from
            :mod:`getml.aggregations` used to compute the targets.

    Returns:
        :class:`pandas.DataFrame`: Population table
        :class:`pandas.DataFrame`: Peripheral table
    """
    # Bug fix: the seeded RandomState was created but never used, so
    # random_state had no effect. All draws now go through it.
    random = np.random.RandomState(random_state)

    population_table = pd.DataFrame()
    population_table["column_01_population"] = random.rand(n_rows_population) * 2.0 - 1.0
    population_table["join_key"] = np.arange(n_rows_population)
    population_table["time_stamp_population"] = random.rand(n_rows_population)

    peripheral_table = pd.DataFrame()
    peripheral_table["column_01_peripheral"] = random.rand(n_rows_peripheral) * 2.0 - 1.0
    # Vectorized draw replaces the former per-row rand(1) loop; the
    # distribution (uniform ints in [0, n_rows_population)) is unchanged.
    peripheral_table["join_key"] = random.randint(
        0, n_rows_population, n_rows_peripheral)
    peripheral_table["time_stamp_peripheral"] = random.rand(n_rows_peripheral)

    # ----------------

    temp = peripheral_table.merge(
        population_table[["join_key", "time_stamp_population",
                          "column_01_population"]],
        how="left",
        on="join_key"
    )

    # Apply some conditions
    temp = temp[
        (temp["time_stamp_peripheral"] <= temp["time_stamp_population"]) &
        (temp["column_01_peripheral"] > temp["column_01_population"] - 0.5)
    ]

    # Bug fix: the aggregation parameter was ignored (the count was
    # hard-coded). Using _aggregate honors it; for the default
    # aggregations.Count the result is identical to the old behavior.
    temp = _aggregate(temp, aggregation, "column_01_peripheral", "join_key")
    temp = temp.rename(index=str, columns={"column_01_peripheral": "targets"})

    population_table = population_table.merge(
        temp,
        how="left",
        on="join_key"
    )
    del temp

    population_table = population_table.rename(
        index=str,
        columns={"column_01_population": "column_01"}
    )
    peripheral_table = peripheral_table.rename(
        index=str,
        columns={"column_01_peripheral": "column_01"}
    )

    # ----------------

    population_table = population_table.rename(
        index=str, columns={"time_stamp_population": "time_stamp"})
    peripheral_table = peripheral_table.rename(
        index=str, columns={"time_stamp_peripheral": "time_stamp"})

    # ----------------

    # Replace NaN targets with 0.0 - target values may never be NaN!
    population_table["targets"] = np.where(
        np.isnan(population_table["targets"]),
        0,
        population_table["targets"])

    return population_table, peripheral_table
# -----------------------------------------------------------------------------
def make_snowflake(n_rows_population=500,
                   n_rows_peripheral1=5000,
                   n_rows_peripheral2=125000,
                   aggregation1=aggregations.Sum,
                   aggregation2=aggregations.Count,
                   random_state=None):
    """Generate a random dataset with continuous numerical variables.

    The dataset consists of a population table and two peripheral tables
    arranged in a snowflake schema.

    The first peripheral table has 4 columns:

    * `column_01`: random number between -1 and 1
    * `join_key`: random integer in the range from 0 to ``n_rows_population``
    * `join_key2`: unique integer in the range from 0 to ``n_rows_peripheral1``
    * `time_stamp`: random number between 0 and 1

    The second peripheral table has 3 columns:

    * `column_01`: random number between -1 and 1
    * `join_key2`: random integer in the range from 0 to ``n_rows_peripheral1``
    * `time_stamp`: random number between 0 and 1

    The population table has 4 columns:

    * `column_01`: random number between -1 and 1
    * `join_key`: unique integer in the range from 0 to ``n_rows_population``
    * `time_stamp`: random number between 0 and 1
    * `targets`: target variable as defined by the SQL block below:

    .. code-block:: sql

        SELECT aggregation1( feature_1_1 )
        FROM POPULATION t1
        LEFT JOIN (
            SELECT aggregation2( t4.column_01 ) AS feature_1_1
            FROM PERIPHERAL t3
            LEFT JOIN PERIPHERAL2 t4
            ON t3.join_key2 = t4.join_key2
            WHERE (
                ( t3.time_stamp - t4.time_stamp <= 0.5 )
            ) AND t4.time_stamp <= t3.time_stamp
            GROUP BY t3.join_key,
                     t3.time_stamp
        ) t2
        ON t1.join_key = t2.join_key
        WHERE t2.time_stamp <= t1.time_stamp
        GROUP BY t1.join_key,
                 t1.time_stamp;

    Args:
        n_rows_population (int, optional): Number of rows in the population table
        n_rows_peripheral1 (int, optional): Number of rows in the first peripheral table
        n_rows_peripheral2 (int, optional): Number of rows in the second peripheral table
        aggregation1 (string, optional): Aggregation to be used to aggregate the first peripheral table
        aggregation2 (string, optional): Aggregation to be used to aggregate the second peripheral table
        random_state (int, optional): Determines random number generation for
            dataset creation. Pass an int for reproducible output across
            multiple function calls.

    Returns:
        :class:`pandas.DataFrame`: Population table
        :class:`pandas.DataFrame`: First peripheral table
        :class:`pandas.DataFrame`: Second peripheral table
    """
    # BUG FIX: the original created this RandomState but then drew every
    # sample from the *global* np.random, so `random_state` was silently
    # ignored. All draws now go through the seeded generator.
    random = np.random.RandomState(random_state)

    population_table = pd.DataFrame()
    population_table["column_01"] = random.rand(n_rows_population) * 2.0 - 1.0
    population_table["join_key"] = range(n_rows_population)
    population_table["time_stamp_population"] = random.rand(n_rows_population)

    peripheral_table = pd.DataFrame()
    peripheral_table["column_01"] = random.rand(n_rows_peripheral1) * 2.0 - 1.0
    # Random foreign keys into the population table, drawn vectorized
    # instead of one np.random.rand(1) call per row.
    peripheral_table["join_key"] = (
        random.rand(n_rows_peripheral1) * n_rows_population).astype(int)
    peripheral_table["join_key2"] = range(n_rows_peripheral1)
    peripheral_table["time_stamp_peripheral"] = random.rand(n_rows_peripheral1)

    peripheral_table2 = pd.DataFrame()
    peripheral_table2["column_01"] = random.rand(n_rows_peripheral2) * 2.0 - 1.0
    peripheral_table2["join_key2"] = (
        random.rand(n_rows_peripheral2) * n_rows_peripheral1).astype(int)
    peripheral_table2["time_stamp_peripheral2"] = random.rand(n_rows_peripheral2)

    # ----------------
    # Merge peripheral_table with peripheral_table2
    temp = peripheral_table2.merge(
        peripheral_table[["join_key2", "time_stamp_peripheral"]],
        how="left",
        on="join_key2"
    )

    # Keep only peripheral2 rows observed before the matching peripheral1
    # row, but no more than 0.5 time units earlier.
    temp = temp[
        (temp["time_stamp_peripheral2"] <= temp["time_stamp_peripheral"]) &
        (temp["time_stamp_peripheral2"] >= temp["time_stamp_peripheral"] - 0.5)
    ]

    # Inner aggregation (peripheral2 -> peripheral1).
    temp = _aggregate(temp, aggregation2, "column_01", "join_key2")
    temp = temp.rename(index=str, columns={"column_01": "temporary"})

    peripheral_table = peripheral_table.merge(
        temp,
        how="left",
        on="join_key2"
    )
    del temp

    # Keys with no matching peripheral2 rows produce NaN - treat as 0.0.
    peripheral_table["temporary"] = peripheral_table["temporary"].fillna(0.0)

    # ----------------
    # Merge population_table with peripheral_table
    temp2 = peripheral_table.merge(
        population_table[["join_key", "time_stamp_population"]],
        how="left",
        on="join_key"
    )

    # Only peripheral rows observed before the population time stamp count.
    temp2 = temp2[
        temp2["time_stamp_peripheral"] <= temp2["time_stamp_population"]
    ]

    # Outer aggregation (peripheral1 -> population).
    temp2 = _aggregate(temp2, aggregation1, "temporary", "join_key")
    temp2 = temp2.rename(index=str, columns={"temporary": "targets"})

    population_table = population_table.merge(
        temp2,
        how="left",
        on="join_key"
    )
    del temp2

    # Replace NaN targets with 0.0 - target values may never be NaN!
    population_table["targets"] = population_table["targets"].fillna(0.0)

    # Remove temporary column.
    del peripheral_table["temporary"]

    # ----------------
    population_table = population_table.rename(
        index=str, columns={"time_stamp_population": "time_stamp"})
    peripheral_table = peripheral_table.rename(
        index=str, columns={"time_stamp_peripheral": "time_stamp"})
    peripheral_table2 = peripheral_table2.rename(
        index=str, columns={"time_stamp_peripheral2": "time_stamp"})

    # ----------------
    return population_table, peripheral_table, peripheral_table2
# -----------------------------------------------------------------------------
| 34.150367
| 104
| 0.634187
| 3,358
| 27,935
| 5.05271
| 0.056284
| 0.058349
| 0.042435
| 0.029705
| 0.882772
| 0.873048
| 0.859963
| 0.851476
| 0.82796
| 0.817233
| 0
| 0.025013
| 0.241489
| 27,935
| 817
| 105
| 34.192166
| 0.775733
| 0.418185
| 0
| 0.669468
| 1
| 0
| 0.15393
| 0.069207
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019608
| false
| 0
| 0.008403
| 0
| 0.072829
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
df5773d150fa0e18247744ff7fb2d888d3c09a23
| 94,209
|
py
|
Python
|
octopus_deploy_swagger_client/octopus_deploy_client/projects_api.py
|
cvent/octopus-deploy-api-client
|
0e03e842e1beb29b132776aee077df570b88366a
|
[
"Apache-2.0"
] | null | null | null |
octopus_deploy_swagger_client/octopus_deploy_client/projects_api.py
|
cvent/octopus-deploy-api-client
|
0e03e842e1beb29b132776aee077df570b88366a
|
[
"Apache-2.0"
] | null | null | null |
octopus_deploy_swagger_client/octopus_deploy_client/projects_api.py
|
cvent/octopus-deploy-api-client
|
0e03e842e1beb29b132776aee077df570b88366a
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Octopus Server API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 2019.6.7+Branch.tags-2019.6.7.Sha.aa18dc6809953218c66f57eff7d26481d9b23d6a
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from octopus_deploy_swagger_client.api_client import ApiClient
class ProjectsApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_response_descriptor_projects_project_project_resource(self, **kwargs): # noqa: E501
"""Create a ProjectResource # noqa: E501
Creates a new project. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_response_descriptor_projects_project_project_resource(async_req=True)
>>> result = thread.get()
:param async_req bool
:param ProjectResource project_resource: The ProjectResource resource to create
:return: ProjectResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_response_descriptor_projects_project_project_resource_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.create_response_descriptor_projects_project_project_resource_with_http_info(**kwargs) # noqa: E501
return data
    def create_response_descriptor_projects_project_project_resource_with_http_info(self, **kwargs):  # noqa: E501
        """Create a ProjectResource  # noqa: E501

        Creates a new project.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.create_response_descriptor_projects_project_project_resource_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param ProjectResource project_resource: The ProjectResource resource to create
        :return: ProjectResource
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Whitelist of keyword arguments accepted by this endpoint.
        all_params = ['project_resource']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot the current locals, then fold the caller's kwargs into the
        # snapshot, rejecting anything not in the whitelist above.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method create_response_descriptor_projects_project_project_resource" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # The ProjectResource payload, when supplied, is sent as the request body.
        body_params = None
        if 'project_resource' in params:
            body_params = params['project_resource']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

        # Delegate the actual HTTP exchange to the shared ApiClient.
        return self.api_client.call_api(
            '/api/projects', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ProjectResource',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def create_response_descriptor_projects_project_project_resource_spaces(self, base_space_id, **kwargs): # noqa: E501
"""Create a ProjectResource # noqa: E501
Creates a new project. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_response_descriptor_projects_project_project_resource_spaces(base_space_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param ProjectResource project_resource: The ProjectResource resource to create
:return: ProjectResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_response_descriptor_projects_project_project_resource_spaces_with_http_info(base_space_id, **kwargs) # noqa: E501
else:
(data) = self.create_response_descriptor_projects_project_project_resource_spaces_with_http_info(base_space_id, **kwargs) # noqa: E501
return data
    def create_response_descriptor_projects_project_project_resource_spaces_with_http_info(self, base_space_id, **kwargs):  # noqa: E501
        """Create a ProjectResource  # noqa: E501

        Creates a new project.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.create_response_descriptor_projects_project_project_resource_spaces_with_http_info(base_space_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str base_space_id: ID of the space (required)
        :param ProjectResource project_resource: The ProjectResource resource to create
        :return: ProjectResource
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Whitelist of keyword arguments accepted by this endpoint.
        all_params = ['base_space_id', 'project_resource']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot the current locals, then fold the caller's kwargs into the
        # snapshot, rejecting anything not in the whitelist above.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method create_response_descriptor_projects_project_project_resource_spaces" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'base_space_id' is set
        if ('base_space_id' not in params or
                params['base_space_id'] is None):
            raise ValueError("Missing the required parameter `base_space_id` when calling `create_response_descriptor_projects_project_project_resource_spaces`")  # noqa: E501

        collection_formats = {}

        # `baseSpaceId` is substituted into the URL template below.
        path_params = {}
        if 'base_space_id' in params:
            path_params['baseSpaceId'] = params['base_space_id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # The ProjectResource payload, when supplied, is sent as the request body.
        body_params = None
        if 'project_resource' in params:
            body_params = params['project_resource']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

        # Delegate the actual HTTP exchange to the shared ApiClient.
        return self.api_client.call_api(
            '/api/{baseSpaceId}/projects', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ProjectResource',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_get_responder(self, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_get_responder # noqa: E501
Gets the logo associated with the project. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_get_responder(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: ID of the resource (required)
:return: file
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_get_responder_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_get_responder_with_http_info(id, **kwargs) # noqa: E501
return data
    def custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_get_responder_with_http_info(self, id, **kwargs):  # noqa: E501
        """custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_get_responder  # noqa: E501

        Gets the logo associated with the project. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_get_responder_with_http_info(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id: ID of the resource (required)
        :return: file
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Whitelist of keyword arguments accepted by this endpoint.
        all_params = ['id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot the current locals, then fold the caller's kwargs into the
        # snapshot, rejecting anything not in the whitelist above.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_get_responder" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_get_responder`")  # noqa: E501

        collection_formats = {}

        # `id` is substituted into the URL template below.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # GET request - no request body is sent.
        body_params = None
        # HTTP header `Accept` - the logo is returned as a PNG image.
        header_params['Accept'] = self.api_client.select_header_accept(
            ['image/png'])  # noqa: E501

        # Authentication setting
        auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

        # Delegate the actual HTTP exchange to the shared ApiClient.
        return self.api_client.call_api(
            '/api/projects/{id}/logo', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='file',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_get_responder_spaces(self, base_space_id, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_get_responder_spaces # noqa: E501
Gets the logo associated with the project. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_get_responder_spaces(base_space_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str id: ID of the resource (required)
:return: file
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_get_responder_spaces_with_http_info(base_space_id, id, **kwargs) # noqa: E501
else:
(data) = self.custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_get_responder_spaces_with_http_info(base_space_id, id, **kwargs) # noqa: E501
return data
    def custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_get_responder_spaces_with_http_info(self, base_space_id, id, **kwargs):  # noqa: E501
        """custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_get_responder_spaces  # noqa: E501

        Gets the logo associated with the project. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_get_responder_spaces_with_http_info(base_space_id, id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str base_space_id: ID of the space (required)
        :param str id: ID of the resource (required)
        :return: file
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Whitelist of keyword arguments accepted by this endpoint.
        all_params = ['base_space_id', 'id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot the current locals, then fold the caller's kwargs into the
        # snapshot, rejecting anything not in the whitelist above.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_get_responder_spaces" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'base_space_id' is set
        if ('base_space_id' not in params or
                params['base_space_id'] is None):
            raise ValueError("Missing the required parameter `base_space_id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_get_responder_spaces`")  # noqa: E501
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_get_responder_spaces`")  # noqa: E501

        collection_formats = {}

        # `baseSpaceId` and `id` are substituted into the URL template below.
        path_params = {}
        if 'base_space_id' in params:
            path_params['baseSpaceId'] = params['base_space_id']  # noqa: E501
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # GET request - no request body is sent.
        body_params = None
        # HTTP header `Accept` - the logo is returned as a PNG image.
        header_params['Accept'] = self.api_client.select_header_accept(
            ['image/png'])  # noqa: E501

        # Authentication setting
        auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

        # Delegate the actual HTTP exchange to the shared ApiClient.
        return self.api_client.call_api(
            '/api/{baseSpaceId}/projects/{id}/logo', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='file',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder(self, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder # noqa: E501
Updates the logo associated with the project. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: ID of the resource (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder_with_http_info(id, **kwargs) # noqa: E501
return data
    def custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder_with_http_info(self, id, **kwargs):  # noqa: E501
        """custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder  # noqa: E501

        Updates the logo associated with the project. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder_with_http_info(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id: ID of the resource (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Whitelist of keyword arguments accepted by this endpoint.
        all_params = ['id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot the current locals, then fold the caller's kwargs into the
        # snapshot, rejecting anything not in the whitelist above.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder`")  # noqa: E501

        collection_formats = {}

        # `id` is substituted into the URL template below.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # No request body is built here for this generated endpoint.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

        # Delegate the actual HTTP exchange to the shared ApiClient.
        return self.api_client.call_api(
            '/api/projects/{id}/logo', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder_0(self, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder_0 # noqa: E501
Updates the logo associated with the project. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder_0(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: ID of the resource (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder_0_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder_0_with_http_info(id, **kwargs) # noqa: E501
return data
    def custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder_0_with_http_info(self, id, **kwargs):  # noqa: E501
        """custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder_0  # noqa: E501

        Updates the logo associated with the project. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder_0_with_http_info(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id: ID of the resource (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Whitelist of keyword arguments accepted by this endpoint.
        all_params = ['id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot the current locals, then fold the caller's kwargs into the
        # snapshot, rejecting anything not in the whitelist above.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder_0" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder_0`")  # noqa: E501

        collection_formats = {}

        # `id` is substituted into the URL template below.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # No request body is built here for this generated endpoint.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

        # NOTE(review): despite the "_put_responder" name, the generated spec
        # maps this variant to POST - confirm against the upstream swagger.
        return self.api_client.call_api(
            '/api/projects/{id}/logo', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder_spaces(self, base_space_id, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder_spaces # noqa: E501
Updates the logo associated with the project. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder_spaces(base_space_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str id: ID of the resource (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder_spaces_with_http_info(base_space_id, id, **kwargs) # noqa: E501
else:
(data) = self.custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder_spaces_with_http_info(base_space_id, id, **kwargs) # noqa: E501
return data
def custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder_spaces_with_http_info(self, base_space_id, id, **kwargs):  # noqa: E501
    """custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder_spaces  # noqa: E501

    Updates the logo associated with the project. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus.  # noqa: E501

    Synchronous by default; pass async_req=True to get a request thread back:

    >>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder_spaces_with_http_info(base_space_id, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :param str id: ID of the resource (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keywords this endpoint understands, beyond its positional arguments.
    all_params = [
        'base_space_id',
        'id',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    params = {'base_space_id': base_space_id, 'id': id}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder_spaces" % key
            )
        params[key] = val

    # Both path parameters are mandatory; None is rejected up front.
    if params['base_space_id'] is None:
        raise ValueError("Missing the required parameter `base_space_id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder_spaces`")  # noqa: E501
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder_spaces`")  # noqa: E501

    path_params = {
        'baseSpaceId': params['base_space_id'],
        'id': params['id'],
    }

    header_params = {
        # Only JSON responses are consumed by this client.
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
    }

    # API-key based authentication schemes supported by the server.
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    return self.api_client.call_api(
        '/api/{baseSpaceId}/projects/{id}/logo', 'PUT',
        path_params,
        [],  # query params: none for this endpoint
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder_spaces_0(self, base_space_id, id, **kwargs):  # noqa: E501
    """custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder_spaces_0  # noqa: E501

    Updates the logo associated with the project. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus.  # noqa: E501

    Synchronous by default; pass async_req=True to get a request thread back:

    >>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder_spaces_0(base_space_id, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :param str id: ID of the resource (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper want only the payload, not the
    # (data, status, headers) tuple the low-level method can produce.
    kwargs['_return_http_data_only'] = True
    # The delegate already returns a thread when async_req is truthy and
    # the response data otherwise, so a single return covers both cases.
    return self.custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder_spaces_0_with_http_info(base_space_id, id, **kwargs)  # noqa: E501
def custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder_spaces_0_with_http_info(self, base_space_id, id, **kwargs):  # noqa: E501
    """custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder_spaces_0  # noqa: E501

    Updates the logo associated with the project. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus.  # noqa: E501

    Synchronous by default; pass async_req=True to get a request thread back:

    >>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder_spaces_0_with_http_info(base_space_id, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :param str id: ID of the resource (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keywords this endpoint understands, beyond its positional arguments.
    all_params = [
        'base_space_id',
        'id',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    params = {'base_space_id': base_space_id, 'id': id}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder_spaces_0" % key
            )
        params[key] = val

    # Both path parameters are mandatory; None is rejected up front.
    if params['base_space_id'] is None:
        raise ValueError("Missing the required parameter `base_space_id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder_spaces_0`")  # noqa: E501
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_project_logo_put_responder_spaces_0`")  # noqa: E501

    path_params = {
        'baseSpaceId': params['base_space_id'],
        'id': params['id'],
    }

    header_params = {
        # Only JSON responses are consumed by this client.
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
    }

    # API-key based authentication schemes supported by the server.
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    # NOTE(review): despite the "put_responder" name, the generated spec maps
    # this variant to POST — kept as-is to match the server definition.
    return self.api_client.call_api(
        '/api/{baseSpaceId}/projects/{id}/logo', 'POST',
        path_params,
        [],  # query params: none for this endpoint
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def custom_action_response_descriptor_octopus_server_web_api_actions_projects_project_settings_metadata_responder(self, id, **kwargs):  # noqa: E501
    """custom_action_response_descriptor_octopus_server_web_api_actions_projects_project_settings_metadata_responder  # noqa: E501

    Gets the custom settings metadata from the extensions. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus.  # noqa: E501

    Synchronous by default; pass async_req=True to get a request thread back:

    >>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_projects_project_settings_metadata_responder(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: ID of the resource (required)
    :return: list[ProjectSettingsMetadata]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper want only the payload, not the
    # (data, status, headers) tuple the low-level method can produce.
    kwargs['_return_http_data_only'] = True
    # The delegate already returns a thread when async_req is truthy and
    # the response data otherwise, so a single return covers both cases.
    return self.custom_action_response_descriptor_octopus_server_web_api_actions_projects_project_settings_metadata_responder_with_http_info(id, **kwargs)  # noqa: E501
def custom_action_response_descriptor_octopus_server_web_api_actions_projects_project_settings_metadata_responder_with_http_info(self, id, **kwargs):  # noqa: E501
    """custom_action_response_descriptor_octopus_server_web_api_actions_projects_project_settings_metadata_responder  # noqa: E501

    Gets the custom settings metadata from the extensions. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus.  # noqa: E501

    Synchronous by default; pass async_req=True to get a request thread back:

    >>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_projects_project_settings_metadata_responder_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: ID of the resource (required)
    :return: list[ProjectSettingsMetadata]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keywords this endpoint understands, beyond its positional arguments.
    all_params = [
        'id',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    params = {'id': id}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method custom_action_response_descriptor_octopus_server_web_api_actions_projects_project_settings_metadata_responder" % key
            )
        params[key] = val

    # The path parameter is mandatory; None is rejected up front.
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_projects_project_settings_metadata_responder`")  # noqa: E501

    path_params = {'id': params['id']}

    header_params = {
        # Only JSON responses are consumed by this client.
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
    }

    # API-key based authentication schemes supported by the server.
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    return self.api_client.call_api(
        '/api/projects/{id}/metadata', 'GET',
        path_params,
        [],  # query params: none for this endpoint
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[ProjectSettingsMetadata]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def custom_action_response_descriptor_octopus_server_web_api_actions_projects_project_settings_metadata_responder_spaces(self, base_space_id, id, **kwargs):  # noqa: E501
    """custom_action_response_descriptor_octopus_server_web_api_actions_projects_project_settings_metadata_responder_spaces  # noqa: E501

    Gets the custom settings metadata from the extensions. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus.  # noqa: E501

    Synchronous by default; pass async_req=True to get a request thread back:

    >>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_projects_project_settings_metadata_responder_spaces(base_space_id, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :param str id: ID of the resource (required)
    :return: list[ProjectSettingsMetadata]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper want only the payload, not the
    # (data, status, headers) tuple the low-level method can produce.
    kwargs['_return_http_data_only'] = True
    # The delegate already returns a thread when async_req is truthy and
    # the response data otherwise, so a single return covers both cases.
    return self.custom_action_response_descriptor_octopus_server_web_api_actions_projects_project_settings_metadata_responder_spaces_with_http_info(base_space_id, id, **kwargs)  # noqa: E501
def custom_action_response_descriptor_octopus_server_web_api_actions_projects_project_settings_metadata_responder_spaces_with_http_info(self, base_space_id, id, **kwargs):  # noqa: E501
    """custom_action_response_descriptor_octopus_server_web_api_actions_projects_project_settings_metadata_responder_spaces  # noqa: E501

    Gets the custom settings metadata from the extensions. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus.  # noqa: E501

    Synchronous by default; pass async_req=True to get a request thread back:

    >>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_projects_project_settings_metadata_responder_spaces_with_http_info(base_space_id, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :param str id: ID of the resource (required)
    :return: list[ProjectSettingsMetadata]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keywords this endpoint understands, beyond its positional arguments.
    all_params = [
        'base_space_id',
        'id',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    params = {'base_space_id': base_space_id, 'id': id}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method custom_action_response_descriptor_octopus_server_web_api_actions_projects_project_settings_metadata_responder_spaces" % key
            )
        params[key] = val

    # Both path parameters are mandatory; None is rejected up front.
    if params['base_space_id'] is None:
        raise ValueError("Missing the required parameter `base_space_id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_projects_project_settings_metadata_responder_spaces`")  # noqa: E501
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_projects_project_settings_metadata_responder_spaces`")  # noqa: E501

    path_params = {
        'baseSpaceId': params['base_space_id'],
        'id': params['id'],
    }

    header_params = {
        # Only JSON responses are consumed by this client.
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
    }

    # API-key based authentication schemes supported by the server.
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    return self.api_client.call_api(
        '/api/{baseSpaceId}/projects/{id}/metadata', 'GET',
        path_params,
        [],  # query params: none for this endpoint
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[ProjectSettingsMetadata]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def delete_on_background_response_descriptor_projects_project_project_resource(self, id, **kwargs):  # noqa: E501
    """Delete a ProjectResource by ID  # noqa: E501

    Deletes an existing project.  # noqa: E501

    Synchronous by default; pass async_req=True to get a request thread back:

    >>> thread = api.delete_on_background_response_descriptor_projects_project_project_resource(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: ID of the ProjectResource to delete (required)
    :return: TaskResource
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper want only the payload, not the
    # (data, status, headers) tuple the low-level method can produce.
    kwargs['_return_http_data_only'] = True
    # The delegate already returns a thread when async_req is truthy and
    # the response data otherwise, so a single return covers both cases.
    return self.delete_on_background_response_descriptor_projects_project_project_resource_with_http_info(id, **kwargs)  # noqa: E501
def delete_on_background_response_descriptor_projects_project_project_resource_with_http_info(self, id, **kwargs):  # noqa: E501
    """Delete a ProjectResource by ID  # noqa: E501

    Deletes an existing project.  # noqa: E501

    Synchronous by default; pass async_req=True to get a request thread back:

    >>> thread = api.delete_on_background_response_descriptor_projects_project_project_resource_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: ID of the ProjectResource to delete (required)
    :return: TaskResource
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keywords this endpoint understands, beyond its positional arguments.
    all_params = [
        'id',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    params = {'id': id}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_on_background_response_descriptor_projects_project_project_resource" % key
            )
        params[key] = val

    # The path parameter is mandatory; None is rejected up front.
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `delete_on_background_response_descriptor_projects_project_project_resource`")  # noqa: E501

    path_params = {'id': params['id']}

    header_params = {
        # Only JSON responses are consumed by this client.
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
    }

    # API-key based authentication schemes supported by the server.
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    # The server queues the deletion and reports it as a background task.
    return self.api_client.call_api(
        '/api/projects/{id}', 'DELETE',
        path_params,
        [],  # query params: none for this endpoint
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='TaskResource',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def delete_on_background_response_descriptor_projects_project_project_resource_spaces(self, base_space_id, id, **kwargs):  # noqa: E501
    """Delete a ProjectResource by ID  # noqa: E501

    Deletes an existing project.  # noqa: E501

    Synchronous by default; pass async_req=True to get a request thread back:

    >>> thread = api.delete_on_background_response_descriptor_projects_project_project_resource_spaces(base_space_id, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :param str id: ID of the ProjectResource to delete (required)
    :return: TaskResource
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper want only the payload, not the
    # (data, status, headers) tuple the low-level method can produce.
    kwargs['_return_http_data_only'] = True
    # The delegate already returns a thread when async_req is truthy and
    # the response data otherwise, so a single return covers both cases.
    return self.delete_on_background_response_descriptor_projects_project_project_resource_spaces_with_http_info(base_space_id, id, **kwargs)  # noqa: E501
def delete_on_background_response_descriptor_projects_project_project_resource_spaces_with_http_info(self, base_space_id, id, **kwargs):  # noqa: E501
    """Delete a ProjectResource by ID  # noqa: E501

    Deletes an existing project.  # noqa: E501

    Synchronous by default; pass async_req=True to get a request thread back:

    >>> thread = api.delete_on_background_response_descriptor_projects_project_project_resource_spaces_with_http_info(base_space_id, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :param str id: ID of the ProjectResource to delete (required)
    :return: TaskResource
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keywords this endpoint understands, beyond its positional arguments.
    all_params = [
        'base_space_id',
        'id',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    params = {'base_space_id': base_space_id, 'id': id}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_on_background_response_descriptor_projects_project_project_resource_spaces" % key
            )
        params[key] = val

    # Both path parameters are mandatory; None is rejected up front.
    if params['base_space_id'] is None:
        raise ValueError("Missing the required parameter `base_space_id` when calling `delete_on_background_response_descriptor_projects_project_project_resource_spaces`")  # noqa: E501
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `delete_on_background_response_descriptor_projects_project_project_resource_spaces`")  # noqa: E501

    path_params = {
        'baseSpaceId': params['base_space_id'],
        'id': params['id'],
    }

    header_params = {
        # Only JSON responses are consumed by this client.
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
    }

    # API-key based authentication schemes supported by the server.
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    # The server queues the deletion and reports it as a background task.
    return self.api_client.call_api(
        '/api/{baseSpaceId}/projects/{id}', 'DELETE',
        path_params,
        [],  # query params: none for this endpoint
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='TaskResource',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def index_response_descriptor_projects_project_project_resource(self, **kwargs):  # noqa: E501
    """Get a list of ProjectResources  # noqa: E501

    Lists all of the projects in the supplied Octopus Deploy Space, from all project groups. The results will be sorted alphabetically by name.  # noqa: E501

    Synchronous by default; pass async_req=True to get a request thread back:

    >>> thread = api.index_response_descriptor_projects_project_project_resource(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int skip: Number of items to skip
    :param int take: Number of items to take
    :return: ResourceCollectionProjectResource
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper want only the payload, not the
    # (data, status, headers) tuple the low-level method can produce.
    kwargs['_return_http_data_only'] = True
    # The delegate already returns a thread when async_req is truthy and
    # the response data otherwise, so a single return covers both cases.
    return self.index_response_descriptor_projects_project_project_resource_with_http_info(**kwargs)  # noqa: E501
def index_response_descriptor_projects_project_project_resource_with_http_info(self, **kwargs):  # noqa: E501
    """Get a list of ProjectResources  # noqa: E501

    Lists all of the projects in the supplied Octopus Deploy Space, from all project groups. The results will be sorted alphabetically by name.  # noqa: E501

    Synchronous by default; pass async_req=True to get a request thread back:

    >>> thread = api.index_response_descriptor_projects_project_project_resource_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int skip: Number of items to skip
    :param int take: Number of items to take
    :return: ResourceCollectionProjectResource
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keywords this endpoint understands; skip/take page the result set.
    all_params = [
        'skip',
        'take',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    params = {}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method index_response_descriptor_projects_project_project_resource" % key
            )
        params[key] = val

    # Forward the paging options only when the caller supplied them,
    # preserving the server's defaults otherwise.
    query_params = [(name, params[name])
                    for name in ('skip', 'take') if name in params]

    header_params = {
        # Only JSON responses are consumed by this client.
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
    }

    # API-key based authentication schemes supported by the server.
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    return self.api_client.call_api(
        '/api/projects', 'GET',
        {},  # path params: none for this endpoint
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='ResourceCollectionProjectResource',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def index_response_descriptor_projects_project_project_resource_spaces(self, base_space_id, **kwargs):  # noqa: E501
    """Get a list of ProjectResources  # noqa: E501

    Lists all of the projects in the supplied Octopus Deploy Space, from all project groups. The results will be sorted alphabetically by name.  # noqa: E501

    Synchronous by default; pass async_req=True to get a request thread back:

    >>> thread = api.index_response_descriptor_projects_project_project_resource_spaces(base_space_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :param int skip: Number of items to skip
    :param int take: Number of items to take
    :return: ResourceCollectionProjectResource
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper want only the payload, not the
    # (data, status, headers) tuple the low-level method can produce.
    kwargs['_return_http_data_only'] = True
    # The delegate already returns a thread when async_req is truthy and
    # the response data otherwise, so a single return covers both cases.
    return self.index_response_descriptor_projects_project_project_resource_spaces_with_http_info(base_space_id, **kwargs)  # noqa: E501
def index_response_descriptor_projects_project_project_resource_spaces_with_http_info(self, base_space_id, **kwargs):  # noqa: E501
    """Get a list of ProjectResources  # noqa: E501

    Lists all of the projects in the supplied Octopus Deploy Space, from all project groups. The results will be sorted alphabetically by name.  # noqa: E501

    Synchronous by default; pass async_req=True to get a request thread back:

    >>> thread = api.index_response_descriptor_projects_project_project_resource_spaces_with_http_info(base_space_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :param int skip: Number of items to skip
    :param int take: Number of items to take
    :return: ResourceCollectionProjectResource
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keywords this endpoint understands; skip/take page the result set.
    all_params = [
        'base_space_id',
        'skip',
        'take',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    params = {'base_space_id': base_space_id}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method index_response_descriptor_projects_project_project_resource_spaces" % key
            )
        params[key] = val

    # The path parameter is mandatory; None is rejected up front.
    if params['base_space_id'] is None:
        raise ValueError("Missing the required parameter `base_space_id` when calling `index_response_descriptor_projects_project_project_resource_spaces`")  # noqa: E501

    path_params = {'baseSpaceId': params['base_space_id']}

    # Forward the paging options only when the caller supplied them,
    # preserving the server's defaults otherwise.
    query_params = [(name, params[name])
                    for name in ('skip', 'take') if name in params]

    header_params = {
        # Only JSON responses are consumed by this client.
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
    }

    # API-key based authentication schemes supported by the server.
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    return self.api_client.call_api(
        '/api/{baseSpaceId}/projects', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='ResourceCollectionProjectResource',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def list_all_response_descriptor_projects_project_project_resource(self, **kwargs):  # noqa: E501
    """Get a list of ProjectResources  # noqa: E501

    Lists the name and ID of all of the projects in the supplied Octopus Deploy Space. The results will be sorted alphabetically by name.  # noqa: E501

    Synchronous by default; pass async_req=True to get a request thread back:

    >>> thread = api.list_all_response_descriptor_projects_project_project_resource(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: list[ProjectResource]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper want only the payload, not the
    # (data, status, headers) tuple the low-level method can produce.
    kwargs['_return_http_data_only'] = True
    # The delegate already returns a thread when async_req is truthy and
    # the response data otherwise, so a single return covers both cases.
    return self.list_all_response_descriptor_projects_project_project_resource_with_http_info(**kwargs)  # noqa: E501
def list_all_response_descriptor_projects_project_project_resource_with_http_info(self, **kwargs):  # noqa: E501
    """Get a list of ProjectResources.

    Lists the name and ID of all of the projects in the supplied Octopus
    Deploy Space. The results will be sorted alphabetically by name.

    Synchronous by default; pass ``async_req=True`` to receive the request
    thread instead:

    >>> thread = api.list_all_response_descriptor_projects_project_project_resource_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: list[ProjectResource]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Every keyword argument this endpoint understands; anything else is
    # rejected below as a caller error.
    all_params = [
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    params = {}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_all_response_descriptor_projects_project_project_resource" % key
            )
        params[key] = val

    collection_formats = {}
    path_params = {}
    query_params = []
    # Only JSON responses are accepted for this endpoint.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
    }
    form_params = []
    local_var_files = {}
    body_params = None
    # Any of the Octopus API-key schemes may authenticate this call.
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    return self.api_client.call_api(
        '/api/projects/all', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[ProjectResource]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def list_all_response_descriptor_projects_project_project_resource_spaces(self, base_space_id, **kwargs):  # noqa: E501
    """Get a list of ProjectResources.

    Lists the name and ID of all of the projects in the supplied Octopus
    Deploy Space. The results will be sorted alphabetically by name.

    Synchronous by default; pass ``async_req=True`` to receive the request
    thread instead:

    >>> thread = api.list_all_response_descriptor_projects_project_project_resource_spaces(base_space_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :return: list[ProjectResource]
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always returns the payload only, never the
    # full (data, status_code, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Delegate to the *_with_http_info worker: with async_req it returns
    # the request thread, otherwise the deserialized data directly.
    return self.list_all_response_descriptor_projects_project_project_resource_spaces_with_http_info(base_space_id, **kwargs)  # noqa: E501
def list_all_response_descriptor_projects_project_project_resource_spaces_with_http_info(self, base_space_id, **kwargs):  # noqa: E501
    """Get a list of ProjectResources.

    Lists the name and ID of all of the projects in the supplied Octopus
    Deploy Space. The results will be sorted alphabetically by name.

    Synchronous by default; pass ``async_req=True`` to receive the request
    thread instead:

    >>> thread = api.list_all_response_descriptor_projects_project_project_resource_spaces_with_http_info(base_space_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :return: list[ProjectResource]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Every keyword argument this endpoint understands; anything else is
    # rejected below as a caller error.
    all_params = [
        'base_space_id',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    params = {'base_space_id': base_space_id}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_all_response_descriptor_projects_project_project_resource_spaces" % key
            )
        params[key] = val
    # Verify the required parameter 'base_space_id' is set.
    if params.get('base_space_id') is None:
        raise ValueError("Missing the required parameter `base_space_id` when calling `list_all_response_descriptor_projects_project_project_resource_spaces`")  # noqa: E501

    collection_formats = {}
    # Substituted into the {baseSpaceId} placeholder of the URL template.
    path_params = {'baseSpaceId': params['base_space_id']}
    query_params = []
    # Only JSON responses are accepted for this endpoint.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
    }
    form_params = []
    local_var_files = {}
    body_params = None
    # Any of the Octopus API-key schemes may authenticate this call.
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    return self.api_client.call_api(
        '/api/{baseSpaceId}/projects/all', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[ProjectResource]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def load_by_id_or_slug_response_descriptor_projects_project_project_resource(self, id_or_slug, **kwargs):  # noqa: E501
    """Get a ProjectResource by ID or slug.

    Gets a single project by ID or Slug.

    Synchronous by default; pass ``async_req=True`` to receive the request
    thread instead:

    >>> thread = api.load_by_id_or_slug_response_descriptor_projects_project_project_resource(id_or_slug, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id_or_slug: ID or slug of the ProjectResource to load (required)
    :return: ProjectResource
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always returns the payload only, never the
    # full (data, status_code, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Delegate to the *_with_http_info worker: with async_req it returns
    # the request thread, otherwise the deserialized data directly.
    return self.load_by_id_or_slug_response_descriptor_projects_project_project_resource_with_http_info(id_or_slug, **kwargs)  # noqa: E501
def load_by_id_or_slug_response_descriptor_projects_project_project_resource_with_http_info(self, id_or_slug, **kwargs):  # noqa: E501
    """Get a ProjectResource by ID or slug.

    Gets a single project by ID or Slug.

    Synchronous by default; pass ``async_req=True`` to receive the request
    thread instead:

    >>> thread = api.load_by_id_or_slug_response_descriptor_projects_project_project_resource_with_http_info(id_or_slug, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id_or_slug: ID or slug of the ProjectResource to load (required)
    :return: ProjectResource
             If the method is called asynchronously,
             returns the request thread.
    """
    # Every keyword argument this endpoint understands; anything else is
    # rejected below as a caller error.
    all_params = [
        'id_or_slug',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    params = {'id_or_slug': id_or_slug}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method load_by_id_or_slug_response_descriptor_projects_project_project_resource" % key
            )
        params[key] = val
    # Verify the required parameter 'id_or_slug' is set.
    if params.get('id_or_slug') is None:
        raise ValueError("Missing the required parameter `id_or_slug` when calling `load_by_id_or_slug_response_descriptor_projects_project_project_resource`")  # noqa: E501

    collection_formats = {}
    # Substituted into the {idOrSlug} placeholder of the URL template.
    path_params = {'idOrSlug': params['id_or_slug']}
    query_params = []
    # Only JSON responses are accepted for this endpoint.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
    }
    form_params = []
    local_var_files = {}
    body_params = None
    # Any of the Octopus API-key schemes may authenticate this call.
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    return self.api_client.call_api(
        '/api/projects/{idOrSlug}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ProjectResource',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def load_by_id_or_slug_response_descriptor_projects_project_project_resource_spaces(self, base_space_id, id_or_slug, **kwargs):  # noqa: E501
    """Get a ProjectResource by ID or slug.

    Gets a single project by ID or Slug.

    Synchronous by default; pass ``async_req=True`` to receive the request
    thread instead:

    >>> thread = api.load_by_id_or_slug_response_descriptor_projects_project_project_resource_spaces(base_space_id, id_or_slug, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :param str id_or_slug: ID or slug of the ProjectResource to load (required)
    :return: ProjectResource
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always returns the payload only, never the
    # full (data, status_code, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Delegate to the *_with_http_info worker: with async_req it returns
    # the request thread, otherwise the deserialized data directly.
    return self.load_by_id_or_slug_response_descriptor_projects_project_project_resource_spaces_with_http_info(base_space_id, id_or_slug, **kwargs)  # noqa: E501
def load_by_id_or_slug_response_descriptor_projects_project_project_resource_spaces_with_http_info(self, base_space_id, id_or_slug, **kwargs):  # noqa: E501
    """Get a ProjectResource by ID or slug.

    Gets a single project by ID or Slug.

    Synchronous by default; pass ``async_req=True`` to receive the request
    thread instead:

    >>> thread = api.load_by_id_or_slug_response_descriptor_projects_project_project_resource_spaces_with_http_info(base_space_id, id_or_slug, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :param str id_or_slug: ID or slug of the ProjectResource to load (required)
    :return: ProjectResource
             If the method is called asynchronously,
             returns the request thread.
    """
    # Every keyword argument this endpoint understands; anything else is
    # rejected below as a caller error.
    all_params = [
        'base_space_id',
        'id_or_slug',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    params = {'base_space_id': base_space_id, 'id_or_slug': id_or_slug}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method load_by_id_or_slug_response_descriptor_projects_project_project_resource_spaces" % key
            )
        params[key] = val
    # Verify the required parameter 'base_space_id' is set.
    if params.get('base_space_id') is None:
        raise ValueError("Missing the required parameter `base_space_id` when calling `load_by_id_or_slug_response_descriptor_projects_project_project_resource_spaces`")  # noqa: E501
    # Verify the required parameter 'id_or_slug' is set.
    if params.get('id_or_slug') is None:
        raise ValueError("Missing the required parameter `id_or_slug` when calling `load_by_id_or_slug_response_descriptor_projects_project_project_resource_spaces`")  # noqa: E501

    collection_formats = {}
    # Substituted into the {baseSpaceId}/{idOrSlug} URL placeholders.
    path_params = {
        'baseSpaceId': params['base_space_id'],
        'idOrSlug': params['id_or_slug'],
    }
    query_params = []
    # Only JSON responses are accepted for this endpoint.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
    }
    form_params = []
    local_var_files = {}
    body_params = None
    # Any of the Octopus API-key schemes may authenticate this call.
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    return self.api_client.call_api(
        '/api/{baseSpaceId}/projects/{idOrSlug}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ProjectResource',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def modify_response_descriptor_projects_project_project_resource(self, id, **kwargs):  # noqa: E501
    """Modify a ProjectResource by ID.

    Modifies an existing project.

    Synchronous by default; pass ``async_req=True`` to receive the request
    thread instead:

    >>> thread = api.modify_response_descriptor_projects_project_project_resource(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: ID of the ProjectResource to modify (required)
    :param ProjectResource project_resource: The ProjectResource resource to create
    :return: ProjectResource
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always returns the payload only, never the
    # full (data, status_code, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Delegate to the *_with_http_info worker: with async_req it returns
    # the request thread, otherwise the deserialized data directly.
    return self.modify_response_descriptor_projects_project_project_resource_with_http_info(id, **kwargs)  # noqa: E501
def modify_response_descriptor_projects_project_project_resource_with_http_info(self, id, **kwargs):  # noqa: E501
    """Modify a ProjectResource by ID.

    Modifies an existing project.

    Synchronous by default; pass ``async_req=True`` to receive the request
    thread instead:

    >>> thread = api.modify_response_descriptor_projects_project_project_resource_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: ID of the ProjectResource to modify (required)
    :param ProjectResource project_resource: The ProjectResource resource to create
    :return: ProjectResource
             If the method is called asynchronously,
             returns the request thread.
    """
    # Every keyword argument this endpoint understands; anything else is
    # rejected below as a caller error.
    all_params = [
        'id',
        'project_resource',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    params = {'id': id}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method modify_response_descriptor_projects_project_project_resource" % key
            )
        params[key] = val
    # Verify the required parameter 'id' is set.
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `modify_response_descriptor_projects_project_project_resource`")  # noqa: E501

    collection_formats = {}
    # Substituted into the {id} placeholder of the URL template.
    path_params = {'id': params['id']}
    query_params = []
    # Only JSON responses are accepted for this endpoint.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
    }
    form_params = []
    local_var_files = {}
    # The updated project is sent as the request body when supplied.
    body_params = params.get('project_resource')
    # Any of the Octopus API-key schemes may authenticate this call.
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    return self.api_client.call_api(
        '/api/projects/{id}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ProjectResource',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def modify_response_descriptor_projects_project_project_resource_spaces(self, base_space_id, id, **kwargs):  # noqa: E501
    """Modify a ProjectResource by ID.

    Modifies an existing project.

    Synchronous by default; pass ``async_req=True`` to receive the request
    thread instead:

    >>> thread = api.modify_response_descriptor_projects_project_project_resource_spaces(base_space_id, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :param str id: ID of the ProjectResource to modify (required)
    :param ProjectResource project_resource: The ProjectResource resource to create
    :return: ProjectResource
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always returns the payload only, never the
    # full (data, status_code, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Delegate to the *_with_http_info worker: with async_req it returns
    # the request thread, otherwise the deserialized data directly.
    return self.modify_response_descriptor_projects_project_project_resource_spaces_with_http_info(base_space_id, id, **kwargs)  # noqa: E501
def modify_response_descriptor_projects_project_project_resource_spaces_with_http_info(self, base_space_id, id, **kwargs):  # noqa: E501
    """Modify a ProjectResource by ID.

    Modifies an existing project.

    Synchronous by default; pass ``async_req=True`` to receive the request
    thread instead:

    >>> thread = api.modify_response_descriptor_projects_project_project_resource_spaces_with_http_info(base_space_id, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :param str id: ID of the ProjectResource to modify (required)
    :param ProjectResource project_resource: The ProjectResource resource to create
    :return: ProjectResource
             If the method is called asynchronously,
             returns the request thread.
    """
    # Every keyword argument this endpoint understands; anything else is
    # rejected below as a caller error.
    all_params = [
        'base_space_id',
        'id',
        'project_resource',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    params = {'base_space_id': base_space_id, 'id': id}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method modify_response_descriptor_projects_project_project_resource_spaces" % key
            )
        params[key] = val
    # Verify the required parameter 'base_space_id' is set.
    if params.get('base_space_id') is None:
        raise ValueError("Missing the required parameter `base_space_id` when calling `modify_response_descriptor_projects_project_project_resource_spaces`")  # noqa: E501
    # Verify the required parameter 'id' is set.
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `modify_response_descriptor_projects_project_project_resource_spaces`")  # noqa: E501

    collection_formats = {}
    # Substituted into the {baseSpaceId}/{id} URL placeholders.
    path_params = {
        'baseSpaceId': params['base_space_id'],
        'id': params['id'],
    }
    query_params = []
    # Only JSON responses are accepted for this endpoint.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
    }
    form_params = []
    local_var_files = {}
    # The updated project is sent as the request body when supplied.
    body_params = params.get('project_resource')
    # Any of the Octopus API-key schemes may authenticate this call.
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    return self.api_client.call_api(
        '/api/{baseSpaceId}/projects/{id}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ProjectResource',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| 47.128064
| 224
| 0.663291
| 11,276
| 94,209
| 5.191557
| 0.01951
| 0.040041
| 0.028186
| 0.054117
| 0.99122
| 0.99122
| 0.99122
| 0.989272
| 0.988931
| 0.988709
| 0
| 0.013541
| 0.260028
| 94,209
| 1,998
| 225
| 47.151652
| 0.826199
| 0.344914
| 0
| 0.831336
| 1
| 0
| 0.223019
| 0.0988
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037788
| false
| 0
| 0.003687
| 0
| 0.097696
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
df6432a9c47bd3a13d791d14b6be145584fcaac0
| 11,512
|
py
|
Python
|
tests/vfs/tsk_data_stream.py
|
jaegeral/dfvfs
|
606d09bf4de0b5dcf20d1dddff879dfed5c93640
|
[
"Apache-2.0"
] | null | null | null |
tests/vfs/tsk_data_stream.py
|
jaegeral/dfvfs
|
606d09bf4de0b5dcf20d1dddff879dfed5c93640
|
[
"Apache-2.0"
] | null | null | null |
tests/vfs/tsk_data_stream.py
|
jaegeral/dfvfs
|
606d09bf4de0b5dcf20d1dddff879dfed5c93640
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for the data stream implementation using pytsk3."""
import unittest
import pytsk3
from dfvfs.lib import definitions
from dfvfs.path import factory as path_spec_factory
from dfvfs.resolver import context
from dfvfs.vfs import tsk_data_stream
from dfvfs.vfs import tsk_file_system
from tests import test_lib as shared_test_lib
class TSKDataStreamTestExt2(shared_test_lib.BaseTestCase):
  """Tests the SleuthKit (TSK) data stream on ext2."""

  # Inode number of /a_directory/another_file inside the ext2 test image.
  _INODE_ANOTHER_FILE = 15

  def setUp(self):
    """Sets up the needed objects used throughout the test."""
    self._resolver_context = context.Context()

    image_path = self._GetTestFilePath(['ext2.raw'])
    self._SkipIfPathNotExists(image_path)

    os_spec = path_spec_factory.Factory.NewPathSpec(
        definitions.TYPE_INDICATOR_OS, location=image_path)
    self._raw_path_spec = path_spec_factory.Factory.NewPathSpec(
        definitions.TYPE_INDICATOR_RAW, parent=os_spec)
    self._tsk_path_spec = path_spec_factory.Factory.NewPathSpec(
        definitions.TYPE_INDICATOR_TSK, location='/',
        parent=self._raw_path_spec)

    self._file_system = tsk_file_system.TSKFileSystem(
        self._resolver_context, self._tsk_path_spec)
    self._file_system.Open()

  def tearDown(self):
    """Cleans up the needed objects used throughout the test."""
    self._resolver_context.Empty()

  def _GetTestFileEntry(self):
    """Retrieves the file entry of the test file.

    Returns:
      FileEntry: file entry of /a_directory/another_file or None.
    """
    spec = path_spec_factory.Factory.NewPathSpec(
        definitions.TYPE_INDICATOR_EXT, inode=self._INODE_ANOTHER_FILE,
        location='/a_directory/another_file', parent=self._raw_path_spec)
    return self._file_system.GetFileEntryByPathSpec(spec)

  def testName(self):
    """Test the name property."""
    entry = self._GetTestFileEntry()
    self.assertIsNotNone(entry)

    # The default (unnamed) data stream has an empty name.
    data_stream = tsk_data_stream.TSKDataStream(entry, None)
    self.assertEqual(data_stream.name, '')

  def testGetExtents(self):
    """Test the GetExtents function."""
    entry = self._GetTestFileEntry()
    self.assertIsNotNone(entry)

    data_stream = tsk_data_stream.TSKDataStream(entry, None)

    extents = data_stream.GetExtents()
    self.assertEqual(len(extents), 1)

    self.assertEqual(extents[0].extent_type, definitions.EXTENT_TYPE_DATA)
    self.assertEqual(extents[0].offset, 527360)
    self.assertEqual(extents[0].size, 1024)

  def testIsDefault(self):
    """Test the IsDefault function."""
    entry = self._GetTestFileEntry()
    self.assertIsNotNone(entry)

    data_stream = tsk_data_stream.TSKDataStream(entry, None)
    self.assertTrue(data_stream.IsDefault())
class TSKDataStreamTestHFSPlus(shared_test_lib.BaseTestCase):
  """Tests the SleuthKit (TSK) data stream on HFS+."""

  def setUp(self):
    """Sets up the needed objects used throughout the test."""
    self._resolver_context = context.Context()

    image_path = self._GetTestFilePath(['hfsplus.raw'])
    self._SkipIfPathNotExists(image_path)

    os_spec = path_spec_factory.Factory.NewPathSpec(
        definitions.TYPE_INDICATOR_OS, location=image_path)
    self._raw_path_spec = path_spec_factory.Factory.NewPathSpec(
        definitions.TYPE_INDICATOR_RAW, parent=os_spec)
    self._tsk_path_spec = path_spec_factory.Factory.NewPathSpec(
        definitions.TYPE_INDICATOR_TSK, location='/',
        parent=self._raw_path_spec)

    self._file_system = tsk_file_system.TSKFileSystem(
        self._resolver_context, self._tsk_path_spec)
    self._file_system.Open()

  def tearDown(self):
    """Cleans up the needed objects used throughout the test."""
    self._resolver_context.Empty()

  def _GetResourceForkAttribute(self, tsk_file):
    """Retrieves an resource fork attribute.

    Args:
      tsk_file (pytsk3.File): TSK file.

    Returns:
      pytsk3.Attribute: TSK attribute or None.
    """
    # The first attribute of HFS resource fork type, if any.
    return next(
        (attribute for attribute in tsk_file
         if getattr(attribute, 'info', None) and
         getattr(attribute.info, 'type', None) ==
         pytsk3.TSK_FS_ATTR_TYPE_HFS_RSRC),
        None)

  def _GetTestFileEntry(self):
    """Retrieves the file entry of the test file.

    Returns:
      FileEntry: file entry of /a_directory/a_resourcefork or None.
    """
    spec = path_spec_factory.Factory.NewPathSpec(
        definitions.TYPE_INDICATOR_TSK, inode=25,
        location='/a_directory/a_resourcefork', parent=self._raw_path_spec)
    return self._file_system.GetFileEntryByPathSpec(spec)

  def testName(self):
    """Test the name property."""
    entry = self._GetTestFileEntry()
    self.assertIsNotNone(entry)

    # The default data fork has an empty name.
    data_stream = tsk_data_stream.TSKDataStream(entry, None)
    self.assertEqual(data_stream.name, '')

    tsk_file = entry.GetTSKFile()
    self.assertIsNotNone(tsk_file)

    attribute = self._GetResourceForkAttribute(tsk_file)
    self.assertIsNotNone(attribute)

    # The resource fork is exposed as the 'rsrc' data stream.
    data_stream = tsk_data_stream.TSKDataStream(entry, attribute)
    self.assertEqual(data_stream.name, 'rsrc')

  def testGetExtents(self):
    """Test the GetExtents function."""
    entry = self._GetTestFileEntry()
    self.assertIsNotNone(entry)

    data_stream = tsk_data_stream.TSKDataStream(entry, None)

    extents = data_stream.GetExtents()
    self.assertEqual(len(extents), 0)

    tsk_file = entry.GetTSKFile()
    self.assertIsNotNone(tsk_file)

    attribute = self._GetResourceForkAttribute(tsk_file)
    self.assertIsNotNone(attribute)

    data_stream = tsk_data_stream.TSKDataStream(entry, attribute)

    extents = data_stream.GetExtents()
    self.assertEqual(len(extents), 1)

    self.assertEqual(extents[0].extent_type, definitions.EXTENT_TYPE_DATA)
    self.assertEqual(extents[0].offset, 1142784)
    self.assertEqual(extents[0].size, 4096)

  def testIsDefault(self):
    """Test the IsDefault function."""
    entry = self._GetTestFileEntry()
    self.assertIsNotNone(entry)

    data_stream = tsk_data_stream.TSKDataStream(entry, None)
    self.assertTrue(data_stream.IsDefault())

    tsk_file = entry.GetTSKFile()
    self.assertIsNotNone(tsk_file)

    attribute = self._GetResourceForkAttribute(tsk_file)
    self.assertIsNotNone(attribute)

    # The resource fork is not the default data stream.
    data_stream = tsk_data_stream.TSKDataStream(entry, attribute)
    self.assertFalse(data_stream.IsDefault())
class TSKDataStreamTestNTFS(shared_test_lib.BaseTestCase):
  """Tests the SleuthKit (TSK) data stream on NTFS."""

  def setUp(self):
    """Sets up the needed objects used throughout the test."""
    self._resolver_context = context.Context()

    image_path = self._GetTestFilePath(['ntfs.raw'])
    self._SkipIfPathNotExists(image_path)

    os_spec = path_spec_factory.Factory.NewPathSpec(
        definitions.TYPE_INDICATOR_OS, location=image_path)
    self._raw_path_spec = path_spec_factory.Factory.NewPathSpec(
        definitions.TYPE_INDICATOR_RAW, parent=os_spec)
    self._tsk_path_spec = path_spec_factory.Factory.NewPathSpec(
        definitions.TYPE_INDICATOR_TSK, location='/',
        parent=self._raw_path_spec)

    self._file_system = tsk_file_system.TSKFileSystem(
        self._resolver_context, self._tsk_path_spec)
    self._file_system.Open()

  def tearDown(self):
    """Cleans up the needed objects used throughout the test."""
    self._resolver_context.Empty()

  def _GetAlternateDataStreamAttribute(self, tsk_file):
    """Retrieves an ADS attribute.

    Args:
      tsk_file (pytsk3.File): TSK file.

    Returns:
      pytsk3.Attribute: TSK attribute or None.
    """
    # An alternate data stream is a named $DATA attribute; the default
    # (unnamed) $DATA attribute is skipped by the name check.
    return next(
        (attribute for attribute in tsk_file
         if getattr(attribute, 'info', None) and
         getattr(attribute.info, 'type', None) ==
         pytsk3.TSK_FS_ATTR_TYPE_NTFS_DATA and
         getattr(attribute.info, 'name', None)),
        None)

  def _GetTestFileEntry(self):
    """Retrieves the file entry of the test file.

    Returns:
      FileEntry: file entry of /$UpCase or None.
    """
    spec = path_spec_factory.Factory.NewPathSpec(
        definitions.TYPE_INDICATOR_TSK, inode=10, location='/$UpCase',
        parent=self._raw_path_spec)
    return self._file_system.GetFileEntryByPathSpec(spec)

  def testName(self):
    """Test the name property."""
    entry = self._GetTestFileEntry()
    self.assertIsNotNone(entry)

    # The default data stream has an empty name.
    data_stream = tsk_data_stream.TSKDataStream(entry, None)
    self.assertEqual(data_stream.name, '')

    tsk_file = entry.GetTSKFile()
    self.assertIsNotNone(tsk_file)

    attribute = self._GetAlternateDataStreamAttribute(tsk_file)
    self.assertIsNotNone(attribute)

    data_stream = tsk_data_stream.TSKDataStream(entry, attribute)
    self.assertEqual(data_stream.name, '$Info')

  def testGetExtents(self):
    """Test the GetExtents function."""
    entry = self._GetTestFileEntry()
    self.assertIsNotNone(entry)

    data_stream = tsk_data_stream.TSKDataStream(entry, None)

    extents = data_stream.GetExtents()
    self.assertEqual(len(extents), 1)

    self.assertEqual(extents[0].extent_type, definitions.EXTENT_TYPE_DATA)
    self.assertEqual(extents[0].offset, 823296)
    self.assertEqual(extents[0].size, 131072)

    tsk_file = entry.GetTSKFile()
    self.assertIsNotNone(tsk_file)

    attribute = self._GetAlternateDataStreamAttribute(tsk_file)
    self.assertIsNotNone(attribute)

    data_stream = tsk_data_stream.TSKDataStream(entry, attribute)

    # No extents are returned for a resident $DATA attribute.
    extents = data_stream.GetExtents()
    self.assertEqual(len(extents), 0)

  def testIsDefault(self):
    """Test the IsDefault function."""
    entry = self._GetTestFileEntry()
    self.assertIsNotNone(entry)

    data_stream = tsk_data_stream.TSKDataStream(entry, None)
    self.assertTrue(data_stream.IsDefault())

    tsk_file = entry.GetTSKFile()
    self.assertIsNotNone(tsk_file)

    attribute = self._GetAlternateDataStreamAttribute(tsk_file)
    self.assertIsNotNone(attribute)

    # An alternate data stream is not the default data stream.
    data_stream = tsk_data_stream.TSKDataStream(entry, attribute)
    self.assertFalse(data_stream.IsDefault())
# Run the module's tests when invoked as a script.
if __name__ == '__main__':
  unittest.main()
| 35.751553
| 79
| 0.749826
| 1,427
| 11,512
| 5.698669
| 0.093203
| 0.062961
| 0.051648
| 0.035416
| 0.923635
| 0.899779
| 0.899779
| 0.899779
| 0.899779
| 0.899779
| 0
| 0.00751
| 0.155577
| 11,512
| 321
| 80
| 35.862928
| 0.82903
| 0.097724
| 0
| 0.84878
| 0
| 0
| 0.024128
| 0.015239
| 0
| 0
| 0
| 0
| 0.219512
| 1
| 0.082927
| false
| 0
| 0.039024
| 0
| 0.160976
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
10e8e68f4bbeaa1e0427a2d2ad21d60ee856f49f
| 51,413
|
py
|
Python
|
imsitu_scorer_log.py
|
thilinicooray/mac-network-pytorch
|
0e4bf3f7f301570b652490f697758361c866f3c1
|
[
"MIT"
] | null | null | null |
imsitu_scorer_log.py
|
thilinicooray/mac-network-pytorch
|
0e4bf3f7f301570b652490f697758361c866f3c1
|
[
"MIT"
] | null | null | null |
imsitu_scorer_log.py
|
thilinicooray/mac-network-pytorch
|
0e4bf3f7f301570b652490f697758361c866f3c1
|
[
"MIT"
] | null | null | null |
#original code https://github.com/my89/imSitu/blob/master/imsitu.py
import torch
import torchnet.meter as meter
class imsitu_scorer():
def __init__(self, encoder,topk, nref, write_to_file=False):
self.score_cards = []
self.topk = topk
self.nref = nref
self.encoder = encoder
self.hico_pred = None
self.hico_target = None
self.write_to_file = write_to_file
if self.write_to_file:
self.role_dict = {}
self.value_all_dict = {}
self.role_pred = {}
self.vall_all_correct = {}
self.fail_verb_role = {}
self.all_verb_role = {}
self.fail_agent = {}
self.pass_list = []
self.all_res = {}
self.correct_roles = {}
self.topk_issue = {}
def clear(self):
self.score_cards = {}
def add_point_hico(self, verb_predict, gt_verbs):
verb_predict = torch.sigmoid(verb_predict)
if self.hico_pred is None:
self.hico_pred = verb_predict
else:
self.hico_pred = torch.cat([self.hico_pred.clone(), verb_predict], 0)
if self.hico_target is None:
self.hico_target = gt_verbs
else:
self.hico_target = torch.cat([self.hico_target.clone(), gt_verbs], 0)
def get_average_results_hico(self):
mtr = meter.mAPMeter()
print('hico scoring :', self.hico_pred[0], self.hico_target[0])
mtr.add(self.hico_pred, self.hico_target)
map = mtr.value()
return map
    def add_point(self, verb_predict, gt_verbs, labels_predict, gt_labels):
        """Score one batch of verb and per-role noun predictions.

        Appends one score card per example with: "verb" (top-k verb hit),
        "value"/"value*" (fraction of roles whose predicted noun matches
        any reference, with / without requiring the verb to be correct),
        "n_value" (role count) and "value-all"/"value-all*" (all roles
        correct, with / without the verb).
        """
        #encoded predictions should be batch x verbs x values #assumes the are the same order as the references
        #encoded reference should be batch x 1+ references*roles,values (sorted)
        batch_size = verb_predict.size()[0]
        for i in range(batch_size):
            verb_pred = verb_predict[i]
            gt_verb = gt_verbs[i]
            label_pred = labels_predict[i]
            gt_label = gt_labels[i]
            #print('check sizes:', verb_pred.size(), gt_verb.size(), label_pred.size(), gt_label.size())
            # Verb ids ranked by predicted score, best first.
            sorted_idx = torch.sort(verb_pred, 0, True)[1]
            gt_v = gt_verb
            role_set = self.encoder.get_role_ids(gt_v)
            #print('sorted idx:',self.topk, sorted_idx[:self.topk], gt_v)
            #print('groud truth verb id:', gt_v)
            new_card = {"verb":0.0, "value":0.0, "value*":0.0, "n_value":0.0, "value-all":0.0, "value-all*":0.0}
            # NOTE: score_card aliases new_card; the dict appended at the end
            # is the same object mutated below.
            score_card = new_card
            verb_found = (torch.sum(sorted_idx[0:self.topk] == gt_v) == 1)
            if verb_found: score_card["verb"] += 1
            gt_role_count = self.encoder.get_role_count(gt_v)
            gt_role_list = self.encoder.verb2_role_dict[self.encoder.verb_list[gt_v]]
            score_card["n_value"] += gt_role_count
            all_found = True
            pred_list = []
            for k in range(0, self.encoder.get_max_role_count()):
                role_id = role_set[k]
                # Skip padding slots and roles outside the gt verb's frame.
                if role_id == len(self.encoder.role_list):
                    continue
                current_role = self.encoder.role_list[role_id]
                if current_role not in gt_role_list:
                    continue
                label_id = torch.max(label_pred[k],0)[1]
                pred_list.append(label_id.item())
                found = False
                # A prediction counts if it matches any reference annotation.
                for r in range(0,self.nref):
                    gt_label_id = gt_label[r][k]
                    #print('ground truth label id = ', gt_label_id)
                    if label_id == gt_label_id:
                        found = True
                        break
                if not found: all_found = False
                #both verb and at least one val found
                if found and verb_found: score_card["value"] += 1
                #at least one val found
                if found: score_card["value*"] += 1
            '''if self.topk == 1:
                print('predicted labels :',pred_list)'''
            #both verb and all values found
            # Normalise per-role hit counts into fractions of this verb's roles.
            score_card["value*"] /= gt_role_count
            score_card["value"] /= gt_role_count
            if all_found and verb_found: score_card["value-all"] += 1
            #all values found
            if all_found: score_card["value-all*"] += 1
            self.score_cards.append(new_card)
    def add_point_noun(self, gt_verbs, labels_predict, gt_labels):
        """Score per-role noun predictions with the ground-truth verb given.

        Same card layout as add_point, but no verb prediction is scored:
        verb_found is forced to False, so "verb", "value" and "value-all"
        stay zero and only the *-variants are meaningful.
        """
        #encoded predictions should be batch x verbs x values #assumes the are the same order as the references
        #encoded reference should be batch x 1+ references*roles,values (sorted)
        batch_size = gt_verbs.size()[0]
        for i in range(batch_size):
            gt_verb = gt_verbs[i]
            label_pred = labels_predict[i]
            gt_label = gt_labels[i]
            #print('check sizes:', verb_pred.size(), gt_verb.size(), label_pred.size(), gt_label.size())
            #sorted_idx = torch.sort(verb_pred, 0, True)[1]
            gt_v = gt_verb
            role_set = self.encoder.get_role_ids(gt_v)
            #print('sorted idx:',self.topk, sorted_idx[:self.topk], gt_v)
            #print('groud truth verb id:', gt_v)
            new_card = {"verb":0.0, "value":0.0, "value*":0.0, "n_value":0.0, "value-all":0.0, "value-all*":0.0}
            # score_card aliases new_card (same dict is appended at the end).
            score_card = new_card
            '''verb_found = (torch.sum(sorted_idx[0:self.topk] == gt_v) == 1)
            if verb_found: score_card["verb"] += 1'''
            # Verb is taken as given; verb-conditioned metrics stay zero.
            verb_found = False
            gt_role_count = self.encoder.get_role_count(gt_v)
            gt_role_list = self.encoder.verb2_role_dict[self.encoder.verb_list[gt_v]]
            score_card["n_value"] += gt_role_count
            all_found = True
            pred_list = []
            for k in range(0, self.encoder.get_max_role_count()):
                role_id = role_set[k]
                # Skip padding slots and roles outside the gt verb's frame.
                if role_id == len(self.encoder.role_list):
                    continue
                current_role = self.encoder.role_list[role_id]
                if current_role not in gt_role_list:
                    continue
                label_id = torch.max(label_pred[k],0)[1]
                pred_list.append(label_id.item())
                found = False
                for r in range(0,self.nref):
                    gt_label_id = gt_label[r][k]
                    #print('ground truth label id = ', gt_label_id)
                    if label_id == gt_label_id:
                        found = True
                        break
                if not found: all_found = False
                #both verb and at least one val found
                if found and verb_found: score_card["value"] += 1
                #at least one val found
                if found: score_card["value*"] += 1
            '''if self.topk == 1:
                print('predicted labels :',pred_list)'''
            #both verb and all values found
            score_card["value*"] /= gt_role_count
            score_card["value"] /= gt_role_count
            if all_found and verb_found: score_card["value-all"] += 1
            #all values found
            if all_found: score_card["value-all*"] += 1
            self.score_cards.append(new_card)
    def add_point_noun_sepagentplz(self, gt_verbs, place_role_label_pred, agent_role_label_pred, other_role_label_pred, gt_labels):
        """Score noun predictions where place, agent and the remaining roles
        come from three separate prediction heads.

        Role slot k == 0 is scored against place_role_label_pred, k == 1
        against agent_role_label_pred, and k >= 2 against
        other_role_label_pred[k-2]. Verb-conditioned metrics stay zero.
        """
        #encoded predictions should be batch x verbs x values #assumes the are the same order as the references
        #encoded reference should be batch x 1+ references*roles,values (sorted)
        batch_size = gt_verbs.size()[0]
        for i in range(batch_size):
            gt_verb = gt_verbs[i]
            place_pred = place_role_label_pred[i]
            agent_pred = agent_role_label_pred[i]
            other_label_pred = other_role_label_pred[i]
            gt_label = gt_labels[i]
            #print('check sizes:', verb_pred.size(), gt_verb.size(), label_pred.size(), gt_label.size())
            #sorted_idx = torch.sort(verb_pred, 0, True)[1]
            gt_v = gt_verb
            role_set = self.encoder.get_role_ids(gt_v)
            #print('sorted idx:',self.topk, sorted_idx[:self.topk], gt_v)
            #print('groud truth verb id:', gt_v)
            new_card = {"verb":0.0, "value":0.0, "value*":0.0, "n_value":0.0, "value-all":0.0, "value-all*":0.0}
            # score_card aliases new_card (same dict is appended at the end).
            score_card = new_card
            '''verb_found = (torch.sum(sorted_idx[0:self.topk] == gt_v) == 1)
            if verb_found: score_card["verb"] += 1'''
            # Verb is taken as given; verb-conditioned metrics stay zero.
            verb_found = False
            gt_role_count = self.encoder.get_role_count(gt_v)
            gt_role_list = self.encoder.verb2_role_dict[self.encoder.verb_list[gt_v]]
            score_card["n_value"] += gt_role_count
            all_found = True
            pred_list = []
            for k in range(0, self.encoder.get_max_role_count()):
                role_id = role_set[k]
                if role_id == len(self.encoder.role_list):
                    continue
                current_role = self.encoder.role_list[role_id]
                if current_role not in gt_role_list:
                    continue
                # Slot 0 is place, slot 1 is agent, the rest come from the
                # shared "other roles" head.
                if k==0:
                    label_id = torch.max(place_pred,0)[1]
                    #print(self.encoder.verb_list[gt_v], current_role, 'place')
                elif k==1:
                    label_id = torch.max(agent_pred,0)[1]
                    #print(self.encoder.verb_list[gt_v], current_role, 'agent')
                else:
                    label_id = torch.max(other_label_pred[k-2],0)[1]
                    #print(self.encoder.verb_list[gt_v], current_role, 'other')
                pred_list.append(label_id.item())
                found = False
                for r in range(0,self.nref):
                    gt_label_id = gt_label[r][k]
                    #print('ground truth label id = ', gt_label_id)
                    if label_id == gt_label_id:
                        found = True
                        break
                if not found: all_found = False
                #both verb and at least one val found
                if found and verb_found: score_card["value"] += 1
                #at least one val found
                if found: score_card["value*"] += 1
            '''if self.topk == 1:
                print('predicted labels :',pred_list)'''
            #both verb and all values found
            score_card["value*"] /= gt_role_count
            score_card["value"] /= gt_role_count
            if all_found and verb_found: score_card["value-all"] += 1
            #all values found
            if all_found: score_card["value-all*"] += 1
            self.score_cards.append(new_card)
def add_point_agent_one_only(self, id_set, labels_predict, gt_labels):
#encoded predictions should be batch x verbs x values #assumes the are the same order as the references
#encoded reference should be batch x 1+ references*roles,values (sorted)
batch_size = labels_predict.size()[0]
for i in range(batch_size):
img_id = id_set[i]
label_pred = labels_predict[i]
gt_label = gt_labels[i]
new_card = {"verb":0.0, "value":0.0, "value*":0.0, "n_value":0.0, "value-all":0.0, "value-all*":0.0}
score_card = new_card
'''verb_found = (torch.sum(sorted_idx[0:self.topk] == gt_v) == 1)
if verb_found: score_card["verb"] += 1'''
verb_found = False
all_found = True
label_id = torch.max(label_pred,0)[1]
found = False
for r in range(0,self.nref):
gt_label_id = gt_label[r]
gt_label_name = self.encoder.label_list[gt_label_id]
if self.write_to_file:
if gt_label_name not in self.role_dict:
self.role_dict[gt_label_name] = {'all':1, 'found': 0}
else:
self.role_dict[gt_label_name]['all'] += 1
#print('ground truth label id = ', gt_label_id)
if label_id == gt_label_id:
#print('correct :', img_id, self.encoder.label_list[gt_label_id])
if self.write_to_file:
self.role_dict[gt_label_name]['found'] += 1
found = True
break
if not found:
all_found = False
if self.write_to_file:
self.fail_agent[img_id] = self.encoder.label_list[label_id]
#both verb and at least one val found
if found and verb_found: score_card["value"] += 1
#at least one val found
if found: score_card["value*"] += 1
'''if self.topk == 1:
print('predicted labels :',pred_list)'''
#both verb and all values found
score_card["value*"] /= 1
score_card["value"] /= 1
if all_found and verb_found: score_card["value-all"] += 1
#all values found
if all_found: score_card["value-all*"] += 1
self.score_cards.append(new_card)
    def add_point_noun_log(self, img_id, gt_verbs, labels_predict, gt_labels):
        """Score per-role noun predictions (gt verb given) with logging.

        Same scoring as add_point_noun but iterates the gt verb's first
        gt_role_count slots directly and, when write_to_file is set, logs
        per-role statistics (all_verb_role / fail_verb_role / role_dict)
        and per-image results (all_res / vall_all_correct / value_all_dict).
        """
        #encoded predictions should be batch x verbs x values #assumes the are the same order as the references
        #encoded reference should be batch x 1+ references*roles,values (sorted)
        batch_size = gt_verbs.size()[0]
        for i in range(batch_size):
            imgid = img_id[i]
            gt_verb = gt_verbs[i]
            label_pred = labels_predict[i]
            gt_label = gt_labels[i]
            #print('check sizes:', verb_pred.size(), gt_verb.size(), label_pred.size(), gt_label.size())
            #sorted_idx = torch.sort(verb_pred, 0, True)[1]
            gt_v = gt_verb
            role_set = self.encoder.get_role_ids(gt_v)
            #print('sorted idx:',self.topk, sorted_idx[:self.topk], gt_v)
            #print('groud truth verb id:', gt_v)
            new_card = {"verb":0.0, "value":0.0, "value*":0.0, "n_value":0.0, "value-all":0.0, "value-all*":0.0}
            # score_card aliases new_card (same dict is appended at the end).
            score_card = new_card
            '''verb_found = (torch.sum(sorted_idx[0:self.topk] == gt_v) == 1)
            if verb_found: score_card["verb"] += 1'''
            # Verb is taken as given; verb-conditioned metrics stay zero.
            verb_found = False
            gt_role_count = self.encoder.get_role_count(gt_v)
            gt_role_list = self.encoder.verb2_role_dict[self.encoder.verb_list[gt_v]]
            score_card["n_value"] += gt_role_count
            if self.write_to_file:
                self.all_res[imgid] = {'gtv': gt_verb.item(),
                                       'correct_roles':[]}
            all_found = True
            pred_situ = []
            for k in range(0, gt_role_count):
                if self.write_to_file:
                    # Count every scored verb_role pair for later ratios.
                    all_val = self.encoder.verb_list[gt_v] + '_' + gt_role_list[k]
                    if all_val not in self.all_verb_role:
                        self.all_verb_role[all_val] = 1
                    else:
                        self.all_verb_role[all_val] += 1
                label_id = torch.max(label_pred[k],0)[1]
                found = False
                pred_situ.append({gt_role_list[k] : self.encoder.label_list[label_id]})
                for r in range(0,self.nref):
                    gt_label_id = gt_label[r][k]
                    #################################
                    if self.write_to_file:
                        # Record predicted label per (role, gt label) pair.
                        role = gt_role_list[k]
                        gt_label_name = self.encoder.label_list[gt_label_id]
                        pred_label_name = self.encoder.label_list[label_id]
                        if role not in self.role_dict:
                            self.role_dict[role] = {gt_label_name : [pred_label_name]}
                        elif gt_label_name not in self.role_dict[role]:
                            self.role_dict[role][gt_label_name] = [pred_label_name]
                        else:
                            self.role_dict[role][gt_label_name].append(pred_label_name)
                    #######################################################################
                    if label_id == gt_label_id:
                        if self.write_to_file:
                            self.all_res[imgid]['correct_roles'].append(gt_role_list[k])
                        found = True
                        break
                if not found:
                    all_found = False
                    if self.write_to_file:
                        # Count misses per verb_role pair.
                        fail_val = self.encoder.verb_list[gt_v] + '_' + gt_role_list[k]
                        if fail_val not in self.fail_verb_role:
                            self.fail_verb_role[fail_val] = 1
                        else:
                            self.fail_verb_role[fail_val] += 1
                #both verb and at least one val found
                if found and verb_found: score_card["value"] += 1
                #at least one val found
                if found: score_card["value*"] += 1
            '''if self.topk == 1:
                print('predicted labels :',pred_list)'''
            #both verb and all values found
            score_card["value*"] /= gt_role_count
            score_card["value"] /= gt_role_count
            if all_found and verb_found: score_card["value-all"] += 1
            #all values found
            if all_found:
                score_card["value-all*"] += 1
                if self.write_to_file:
                    self.vall_all_correct[imgid] = pred_situ
            else:
                if self.write_to_file:
                    self.value_all_dict[imgid] = pred_situ
            self.score_cards.append(new_card)
    def add_point_eval(self, verb_predict, gt_verbs, labels_predict, gt_labels):
        """Score predictions where role slots follow the *predicted* verb.

        Role slots are taken from the top-1 predicted verb's frame; each
        predicted role is matched back to its position in the gt verb's
        role set (g_idx) before comparing labels. Roles of the gt frame
        that the predicted frame does not cover count as misses.
        """
        #encoded predictions should be batch x verbs x values #assumes the are the same order as the references
        #encoded reference should be batch x 1+ references*roles,values (sorted)
        batch_size = verb_predict.size()[0]
        for i in range(batch_size):
            verb_pred = verb_predict[i]
            gt_verb = gt_verbs[i]
            label_pred = labels_predict[i]
            gt_label = gt_labels[i]
            #print('check sizes:', verb_pred.size(), gt_verb.size(), label_pred.size(), gt_label.size())
            sorted_idx = torch.sort(verb_pred, 0, True)[1]
            #print('top 1:', sorted_idx[0])
            # Role slots of the top-1 *predicted* verb, not the gt verb.
            role_set = self.encoder.get_role_ids(sorted_idx[0])
            gt_v = gt_verb
            gt_role_set = self.encoder.get_role_ids(gt_v)
            #print('sorted idx:',self.topk, sorted_idx[:self.topk], gt_v)
            #print('groud truth verb id:', gt_v)
            #print('role sets :', role_set, gt_role_set)
            new_card = {"verb":0.0, "value":0.0, "value*":0.0, "n_value":0.0, "value-all":0.0, "value-all*":0.0}
            # score_card aliases new_card (same dict is appended at the end).
            score_card = new_card
            verb_found = (torch.sum(sorted_idx[0:self.topk] == gt_v) == 1)
            if verb_found: score_card["verb"] += 1
            gt_role_count = self.encoder.get_role_count(gt_v)
            gt_role_list = self.encoder.verb2_role_dict[self.encoder.verb_list[gt_v]]
            score_card["n_value"] += gt_role_count
            all_found = True
            pred_list = []
            for k in range(0, self.encoder.get_max_role_count()):
                role_id = role_set[k]
                if role_id == len(self.encoder.role_list):
                    continue
                current_role = self.encoder.role_list[role_id]
                if current_role not in gt_role_list:
                    continue
                # Position of this role inside the gt verb's role set.
                g_idx = (gt_role_set == role_id).nonzero()
                label_id = torch.max(label_pred[k],0)[1]
                pred_list.append(label_id.item())
                found = False
                for r in range(0,self.nref):
                    gt_label_id = gt_label[r][g_idx]
                    #print('ground truth label id = ', gt_label_id)
                    if label_id == gt_label_id:
                        found = True
                        break
                if not found: all_found = False
                #both verb and at least one val found
                if found and verb_found: score_card["value"] += 1
                #at least one val found
                if found: score_card["value*"] += 1
            '''if self.topk == 1:
                print('predicted labels :',pred_list)'''
            # Gt roles the predicted frame did not cover count as misses.
            if len(pred_list) < gt_role_count:
                all_found = False
            #both verb and all values found
            score_card["value*"] /= gt_role_count
            score_card["value"] /= gt_role_count
            '''if all_found:
                print('all found role sets :pred, gt', role_set, gt_role_set)'''
            if all_found and verb_found: score_card["value-all"] += 1
            #all values found
            if all_found: score_card["value-all*"] += 1
            self.score_cards.append(new_card)
    def add_point_eval5(self, verb_predict, gt_verbs, labels_predict, gt_labels):
        """Score top-5 style evaluation where label_pred already holds label
        ids grouped per candidate verb.

        When the gt verb is within the top-5, the max_role_count-sized
        slice belonging to that verb candidate is scored; otherwise the
        first slice is used. label_pred entries are label ids, not logits.
        """
        #encoded predictions should be batch x verbs x values #assumes the are the same order as the references
        #encoded reference should be batch x 1+ references*roles,values (sorted)
        batch_size = verb_predict.size()[0]
        for i in range(batch_size):
            verb_pred = verb_predict[i]
            gt_verb = gt_verbs[i]
            label_pred = labels_predict[i]
            gt_label = gt_labels[i]
            #print('check sizes:', verb_pred.size(), gt_verb.size(), label_pred.size(), gt_label.size())
            sorted_idx = torch.sort(verb_pred, 0, True)[1]
            #print('top 1:', sorted_idx[0])
            gt_v = gt_verb
            gt_role_set = self.encoder.get_role_ids(gt_v)
            #print('sorted idx:',self.topk, sorted_idx[:self.topk], gt_v)
            #print('groud truth verb id:', gt_v)
            #print('role sets :', role_set, gt_role_set)
            new_card = {"verb":0.0, "value":0.0, "value*":0.0, "n_value":0.0, "value-all":0.0, "value-all*":0.0}
            # score_card aliases new_card (same dict is appended at the end).
            score_card = new_card
            verb_found = (torch.sum(sorted_idx[0:self.topk] == gt_v) == 1)
            if verb_found:
                score_card["verb"] += 1
            if verb_found and self.topk == 5:
                # Locate the gt verb among the top-5 and pick the label
                # slice predicted for that verb candidate.
                gt_idx = 0
                for cur_idx in range(0,self.topk):
                    if sorted_idx[cur_idx] == gt_v:
                        gt_idx = cur_idx
                        break
                #print('correct idx :', gt_idx, self.encoder.max_role_count*gt_idx, self.encoder.max_role_count*(gt_idx+1))
                label_pred = label_pred[self.encoder.max_role_count*gt_idx : self.encoder.max_role_count*(gt_idx+1)]
            else:
                label_pred = label_pred[:self.encoder.max_role_count]
            gt_role_count = self.encoder.get_role_count(gt_v)
            gt_role_list = self.encoder.verb2_role_dict[self.encoder.verb_list[gt_v]]
            score_card["n_value"] += gt_role_count
            all_found = True
            for k in range(0, gt_role_count):
                #label_id = torch.max(label_pred[k],0)[1]
                label_id = label_pred[k]
                found = False
                for r in range(0,self.nref):
                    gt_label_id = gt_label[r][k]
                    if label_id == gt_label_id:
                        found = True
                        break
                if not found: all_found = False
                #both verb and at least one val found
                if found and verb_found: score_card["value"] += 1
                #at least one val found
                if found: score_card["value*"] += 1
            '''if self.topk == 1:
                print('predicted labels :',pred_list)'''
            #both verb and all values found
            score_card["value*"] /= gt_role_count
            score_card["value"] /= gt_role_count
            '''if all_found:
                print('all found role sets :pred, gt', role_set, gt_role_set)'''
            if all_found and verb_found: score_card["value-all"] += 1
            #all values found
            if all_found: score_card["value-all*"] += 1
            self.score_cards.append(new_card)
    def add_point_eval5_log(self, img_id, verb_predict, gt_verbs, labels_predict, gt_labels):
        """Top-5 style scoring with per-image logging.

        Like add_point_eval5 but label_pred holds logits (argmax taken per
        role) and, when write_to_file is set, per-role predictions are
        logged into role_dict and near misses into value_all_dict.
        """
        #encoded predictions should be batch x verbs x values #assumes the are the same order as the references
        #encoded reference should be batch x 1+ references*roles,values (sorted)
        batch_size = verb_predict.size()[0]
        for i in range(batch_size):
            imgid = img_id[i]
            verb_pred = verb_predict[i]
            gt_verb = gt_verbs[i]
            label_pred = labels_predict[i]
            gt_label = gt_labels[i]
            #print('check sizes:', verb_pred.size(), gt_verb.size(), label_pred.size(), gt_label.size())
            sorted_idx = torch.sort(verb_pred, 0, True)[1]
            #print('top 1:', sorted_idx[0])
            gt_v = gt_verb
            gt_role_set = self.encoder.get_role_ids(gt_v)
            #print('sorted idx:',self.topk, sorted_idx[:self.topk], gt_v)
            #print('groud truth verb id:', gt_v)
            #print('role sets :', role_set, gt_role_set)
            new_card = {"verb":0.0, "value":0.0, "value*":0.0, "n_value":0.0, "value-all":0.0, "value-all*":0.0}
            # score_card aliases new_card (same dict is appended at the end).
            score_card = new_card
            verb_found = (torch.sum(sorted_idx[0:self.topk] == gt_v) == 1)
            if verb_found:
                score_card["verb"] += 1
            if verb_found and self.topk == 5:
                # Pick the label slice of the matching top-5 verb candidate.
                gt_idx = 0
                for cur_idx in range(0,self.topk):
                    if sorted_idx[cur_idx] == gt_v:
                        gt_idx = cur_idx
                        break
                #print('correct idx :', gt_idx, self.encoder.max_role_count*gt_idx, self.encoder.max_role_count*(gt_idx+1))
                label_pred = label_pred[self.encoder.max_role_count*gt_idx : self.encoder.max_role_count*(gt_idx+1)]
            else:
                label_pred = label_pred[:self.encoder.max_role_count]
            gt_role_count = self.encoder.get_role_count(gt_v)
            gt_role_list = self.encoder.verb2_role_dict[self.encoder.verb_list[gt_v]]
            score_card["n_value"] += gt_role_count
            all_found = True
            pred_situ = []
            for k in range(0, gt_role_count):
                label_id = torch.max(label_pred[k],0)[1]
                #label_id = label_pred[k]
                found = False
                pred_situ.append({gt_role_list[k] : self.encoder.label_list[label_id]})
                for r in range(0,self.nref):
                    gt_label_id = gt_label[r][k]
                    #################################
                    if self.write_to_file and verb_found:
                        # Record predicted label per (role, gt label) pair.
                        role = gt_role_list[k]
                        gt_label_name = self.encoder.label_list[gt_label_id]
                        pred_label_name = self.encoder.label_list[label_id]
                        if role not in self.role_dict:
                            self.role_dict[role] = {gt_label_name : [pred_label_name]}
                        elif gt_label_name not in self.role_dict[role]:
                            self.role_dict[role][gt_label_name] = [pred_label_name]
                        else:
                            self.role_dict[role][gt_label_name].append(pred_label_name)
                    #######################################################################
                    if label_id == gt_label_id:
                        found = True
                        break
                if not found: all_found = False
                #both verb and at least one val found
                if found and verb_found: score_card["value"] += 1
                #at least one val found
                if found: score_card["value*"] += 1
            '''if self.topk == 1:
                print('predicted labels :',pred_list)'''
            #both verb and all values found
            score_card["value*"] /= gt_role_count
            score_card["value"] /= gt_role_count
            '''if all_found:
                print('all found role sets :pred, gt', role_set, gt_role_set)'''
            if all_found and verb_found: score_card["value-all"] += 1
            # Log near misses: verb correct but some role wrong.
            if verb_found and not all_found and self.write_to_file:
                self.value_all_dict[imgid] = pred_situ
            #all values found
            if all_found: score_card["value-all*"] += 1
            self.score_cards.append(new_card)
    def add_point_eval5_log_sorted(self, img_id, verb_predict, gt_verbs, labels_predict, gt_labels):
        """Top-5 style scoring where verb_predict is already a ranked list
        of verb ids (no sorting is performed here).

        label_pred holds label ids grouped per candidate verb. When
        write_to_file is set, full per-image results go into all_res and
        near misses into value_all_dict.
        """
        #encoded predictions should be batch x verbs x values #assumes the are the same order as the references
        #encoded reference should be batch x 1+ references*roles,values (sorted)
        batch_size = verb_predict.size()[0]
        for i in range(batch_size):
            current_id = img_id[i]
            verb_pred = verb_predict[i]
            gt_verb = gt_verbs[i]
            label_pred = labels_predict[i]
            gt_label = gt_labels[i]
            #print('check sizes:', verb_pred.size(), gt_verb.size(), label_pred.size(), gt_label.size())
            # Input is already ranked; use it as-is.
            sorted_idx = verb_pred
            #print('top 1:', sorted_idx[0])
            gt_v = gt_verb
            gt_role_set = self.encoder.get_role_ids(gt_v)
            #print('sorted idx:',self.topk, sorted_idx[:self.topk], gt_v)
            #print('groud truth verb id:', gt_v)
            #print('role sets :', role_set, gt_role_set)
            new_card = {"verb":0.0, "value":0.0, "value*":0.0, "n_value":0.0, "value-all":0.0, "value-all*":0.0}
            if self.write_to_file:
                self.all_res[current_id] = {'gtv': gt_verb.item(),'found':-1, 'verbs':sorted_idx[:5].tolist(),
                                            'pred_role_labels':[]}
            # score_card aliases new_card.
            score_card = new_card
            verb_found = (torch.sum(sorted_idx[0:self.topk] == gt_v) == 1)
            if verb_found:
                score_card["verb"] += 1
                if self.write_to_file:
                    self.all_res[current_id]['found'] = 0
            if verb_found and self.topk == 5:
                # Pick the label slice of the matching top-5 verb candidate.
                gt_idx = 0
                for cur_idx in range(0,self.topk):
                    if sorted_idx[cur_idx] == gt_v:
                        gt_idx = cur_idx
                        break
                #print('correct idx :', gt_idx, self.encoder.max_role_count*gt_idx, self.encoder.max_role_count*(gt_idx+1))
                label_pred = label_pred[self.encoder.max_role_count*gt_idx : self.encoder.max_role_count*(gt_idx+1)]
            else:
                label_pred = label_pred[:self.encoder.max_role_count]
            gt_role_count = self.encoder.get_role_count(gt_v)
            gt_role_list = self.encoder.verb2_role_dict[self.encoder.verb_list[gt_v]]
            score_card["n_value"] += gt_role_count
            all_found = True
            pred_situ = []
            for k in range(0, gt_role_count):
                label_id = label_pred[k]
                found = False
                pred_situ.append({gt_role_list[k] : self.encoder.label_list[label_id]})
                for r in range(0,self.nref):
                    gt_label_id = gt_label[r][k]
                    if label_id == gt_label_id:
                        found = True
                        break
                if not found: all_found = False
                #both verb and at least one val found
                if found and verb_found: score_card["value"] += 1
                #at least one val found
                if found: score_card["value*"] += 1
            '''if self.topk == 1:
                print('predicted labels :',pred_list)'''
            #both verb and all values found
            score_card["value*"] /= gt_role_count
            score_card["value"] /= gt_role_count
            '''if all_found:
                print('all found role sets :pred, gt', role_set, gt_role_set)'''
            if all_found and verb_found: score_card["value-all"] += 1
            # Log near misses: verb correct but some role wrong.
            if verb_found and not all_found and self.write_to_file:
                self.value_all_dict[current_id] = pred_situ
            #all values found
            if all_found: score_card["value-all*"] += 1
            if self.write_to_file:
                self.all_res[current_id]['pred_role_labels'] = pred_situ
            self.score_cards.append(score_card)
    def add_point_eval5_log_sorted_beam(self, img_id, verb_predict, gt_verbs, labels_predict, gt_labels):
        """Beam-search variant of add_point_eval5_log_sorted.

        verb_predict is an already ranked list of verb ids; label_pred is
        indexed per beam (label_pred[gt_idx] selects the matching beam's
        role labels instead of a flat max_role_count slice).
        """
        #encoded predictions should be batch x verbs x values #assumes the are the same order as the references
        #encoded reference should be batch x 1+ references*roles,values (sorted)
        batch_size = verb_predict.size()[0]
        for i in range(batch_size):
            current_id = img_id[i]
            verb_pred = verb_predict[i]
            gt_verb = gt_verbs[i]
            #print('all :', labels_predict.size())
            label_pred = labels_predict[i]
            gt_label = gt_labels[i]
            #print('check sizes:', verb_pred.size(), gt_verb.size(), label_pred.size(), gt_label.size())
            # Input is already ranked; use it as-is.
            sorted_idx = verb_pred
            #print('top 1:', sorted_idx[0])
            gt_v = gt_verb
            gt_role_set = self.encoder.get_role_ids(gt_v)
            #print('sorted idx:',self.topk, sorted_idx[:self.topk], gt_v)
            #print('groud truth verb id:', gt_v)
            #print('role sets :', role_set, gt_role_set)
            new_card = {"verb":0.0, "value":0.0, "value*":0.0, "n_value":0.0, "value-all":0.0, "value-all*":0.0}
            if self.write_to_file:
                self.all_res[current_id] = {'gtv': gt_verb.item(),'found':-1, 'verbs':sorted_idx[:5].tolist(),
                                            'pred_role_labels':[]}
            # score_card aliases new_card.
            score_card = new_card
            verb_found = (torch.sum(sorted_idx[0:self.topk] == gt_v) == 1)
            if verb_found:
                score_card["verb"] += 1
                if self.write_to_file:
                    self.all_res[current_id]['found'] = 0
            if verb_found and self.topk == 5:
                # Select the beam whose verb matches the ground truth.
                gt_idx = 0
                for cur_idx in range(0,self.topk):
                    if sorted_idx[cur_idx] == gt_v:
                        gt_idx = cur_idx
                        break
                #print('correct idx :', gt_idx, self.encoder.max_role_count*gt_idx, self.encoder.max_role_count*(gt_idx+1))
                label_pred_selected = label_pred[gt_idx]
                #print('5', label_pred.size(), gt_idx, label_pred_selected.size())
            else:
                label_pred_selected = label_pred[0]
                #print('other', label_pred.size(), label_pred_selected.size())
            gt_role_count = self.encoder.get_role_count(gt_v)
            gt_role_list = self.encoder.verb2_role_dict[self.encoder.verb_list[gt_v]]
            score_card["n_value"] += gt_role_count
            all_found = True
            pred_situ = []
            for k in range(0, gt_role_count):
                #print(label_pred_selected.size(), label_pred_selected[k])
                label_id = label_pred_selected[k]
                found = False
                pred_situ.append({gt_role_list[k] : self.encoder.label_list[label_id]})
                for r in range(0,self.nref):
                    gt_label_id = gt_label[r][k]
                    if label_id == gt_label_id:
                        found = True
                        break
                if not found: all_found = False
                #both verb and at least one val found
                if found and verb_found: score_card["value"] += 1
                #at least one val found
                if found: score_card["value*"] += 1
            '''if self.topk == 1:
                print('predicted labels :',pred_list)'''
            #both verb and all values found
            score_card["value*"] /= gt_role_count
            score_card["value"] /= gt_role_count
            '''if all_found:
                print('all found role sets :pred, gt', role_set, gt_role_set)'''
            if all_found and verb_found: score_card["value-all"] += 1
            # Log near misses: verb correct but some role wrong.
            if verb_found and not all_found and self.write_to_file:
                self.value_all_dict[current_id] = pred_situ
            #all values found
            if all_found: score_card["value-all*"] += 1
            if self.write_to_file:
                self.all_res[current_id]['pred_role_labels'] = pred_situ
            self.score_cards.append(score_card)
def add_point_verb_only(self, verb_predict, gt_verbs):
#encoded predictions should be batch x verbs x values #assumes the are the same order as the references
#encoded reference should be batch x 1+ references*roles,values (sorted)
batch_size = verb_predict.size()[0]
for i in range(batch_size):
verb_pred = verb_predict[i]
gt_verb = gt_verbs[i]
#print('check sizes:', verb_pred.size(), gt_verb.size(), label_pred.size(), gt_label.size())
sorted_idx = torch.sort(verb_pred, 0, True)[1]
gt_v = gt_verb
new_card = {"verb":0.0, "value":0.0, "value*":0.0, "n_value":0.0, "value-all":0.0, "value-all*":0.0}
score_card = new_card
verb_found = (torch.sum(sorted_idx[0:self.topk] == gt_v) == 1)
if verb_found: score_card["verb"] += 1
self.score_cards.append(new_card)
def add_point_rot_only(self, rot_predict, gt_labels):
#encoded predictions should be batch x verbs x values #assumes the are the same order as the references
#encoded reference should be batch x 1+ references*roles,values (sorted)
batch_size = rot_predict.size()[0]
for i in range(batch_size):
rot_pred = rot_predict[i]
gt_rot = gt_labels[i]
#print('check sizes:', verb_pred.size(), gt_verb.size(), label_pred.size(), gt_label.size())
sorted_idx = torch.sort(rot_pred, 0, True)[1]
gt_r = gt_rot
new_card = {"rot":0.0}
rot_found = (torch.sum(sorted_idx[0:self.topk] == gt_r) == 1)
if rot_found: new_card["rot"] += 1
self.score_cards.append(new_card)
def add_point_agent_only(self, agent_predict, gt_agents):
#encoded predictions should be batch x verbs x values #assumes the are the same order as the references
#encoded reference should be batch x 1+ references*roles,values (sorted)
batch_size = agent_predict.size()[0]
for i in range(batch_size):
agent_pred = agent_predict[i]
gt_agent_set = gt_agents[i]
#print('check sizes:', verb_pred.size(), gt_verb.size(), label_pred.size(), gt_label.size())
sorted_idx = torch.sort(agent_pred, 0, True)[1]
new_card = {"verb":0.0, "value":0.0, "value*":0.0, "n_value":0.0, "value-all":0.0, "value-all*":0.0}
score_card = new_card
for r in range(0,self.nref):
gt_agent = gt_agent_set[r]
agent_found = (torch.sum(sorted_idx[0:self.topk] == gt_agent) == 1)
if agent_found:
score_card["value*"] += 1
break
self.score_cards.append(new_card)
def add_point_verb_only_eval(self, img_id, verb_predict, gt_verbs):
#encoded predictions should be batch x verbs x values #assumes the are the same order as the references
#encoded reference should be batch x 1+ references*roles,values (sorted)
batch_size = verb_predict.size()[0]
for i in range(batch_size):
verb_pred = verb_predict[i]
gt_verb = gt_verbs[i]
current_id = img_id[i]
#print('check sizes:', verb_pred.size(), gt_verb.size(), label_pred.size(), gt_label.size())
sorted_idx = torch.sort(verb_pred, 0, True)[1]
gt_v = gt_verb
new_card = {"verb":0.0, "value":0.0, "value*":0.0, "n_value":0.0, "value-all":0.0, "value-all*":0.0}
if self.write_to_file:
self.all_res[current_id] = {'gtv': gt_verb.item(),'found':-1, 'verbs':sorted_idx[:5].tolist(),
}
score_card = new_card
verb_found = (torch.sum(sorted_idx[0:self.topk] == gt_v) == 1)
if verb_found:
score_card["verb"] += 1
if self.write_to_file:
self.pass_list.append(current_id)
self.all_res[current_id]['found'] = 0
self.score_cards.append(score_card)
def add_point_verb_only_diffeval(self, img_id, verb_predict, gt_verbs):
    """Score verb prediction when each image carries 5 candidate distributions.

    For each batch item, the 5 rows of verb_predict[i] are scored in order; the
    item counts as a hit ("verb" = 1.0) if ANY row ranks the ground-truth verb
    in the top-k (scan stops at the first hit). One score card is appended to
    self.score_cards per item. With self.write_to_file set, the argmax verb
    name of every scanned row is accumulated in self.all_res[img_id], and hits
    are appended to self.pass_list.
    """
    #encoded predictions should be batch x verbs x values #assumes the are the same order as the references
    #encoded reference should be batch x 1+ references*roles,values (sorted)
    batch_size = verb_predict.size()[0]
    for i in range(batch_size):
        verb_pred = verb_predict[i]
        gt_verb = gt_verbs[i]
        current_id = img_id[i]
        new_card = {"verb":0.0, "value":0.0, "value*":0.0, "n_value":0.0, "value-all":0.0, "value-all*":0.0}
        # Hard-coded 5: assumes exactly 5 prediction rows per image — TODO confirm.
        for r in range(0,5):
            sorted_idx = torch.sort(verb_pred[r], 0, True)[1]
            verb_found = (torch.sum(sorted_idx[0:self.topk] == gt_verb) == 1)
            if self.write_to_file:
                # Record this row's top-1 verb name (rows after a hit are not logged).
                cur_v = self.encoder.verb_list[sorted_idx[0]]
                if current_id not in self.all_res:
                    self.all_res[current_id] = [cur_v]
                else:
                    self.all_res[current_id].append(cur_v)
            if verb_found:
                if self.write_to_file:
                    self.pass_list.append(current_id)
                new_card["verb"] += 1
                break
        self.score_cards.append(new_card)
def add_point_multi_verb(self, img_id, verb_predict, gt_verbs):
    """Score top-k verb prediction over multiple candidate rows per image.

    An image scores "verb" = 1.0 if ANY of its prediction rows ranks the
    ground-truth verb in the top-k (scan stops at the first hit); one score
    card per image is appended to self.score_cards. With self.write_to_file
    set, each row's top-1 verb id and logit are logged in self.all_res and the
    index of the winning row is stored under 'found'.

    verb_predict: assumed batch x n_rows x n_verbs tensor — TODO confirm.
    gt_verbs: assumed batch of scalar ground-truth verb ids — TODO confirm.
    """
    batch_size = verb_predict.size()[0]
    for i in range(batch_size):
        verb_pred = verb_predict[i]
        gt_verb = gt_verbs[i]
        current_id = img_id[i]
        new_card = {"verb":0.0, "value":0.0, "value*":0.0, "n_value":0.0, "value-all":0.0, "value-all*":0.0}
        # Fix: sort each row ONCE and reuse — the original sorted every row in
        # a logging loop and then re-sorted the same rows in the scoring loop.
        sorted_rows = [torch.sort(verb_pred[r], 0, True) for r in range(verb_pred.size(0))]
        if self.write_to_file:
            self.all_res[current_id] = {
                'gtv': gt_verb.item(), 'found': -1,
                'verbs': [idx[0].item() for _, idx in sorted_rows],
                'logits': [vals[0].item() for vals, _ in sorted_rows],
            }
        for r, (_, sorted_idx) in enumerate(sorted_rows):
            if torch.sum(sorted_idx[0:self.topk] == gt_verb) == 1:
                if self.write_to_file:
                    self.pass_list.append(current_id)
                    self.all_res[current_id]['found'] = r
                new_card["verb"] += 1
                break
        self.score_cards.append(new_card)
def add_point_multi_verb_sum(self, img_id, verb_predict, gt_verbs):
    """Score top-k verb prediction after summing all candidate rows per image.

    The rows of verb_predict[i] are summed into a single score vector; the
    image scores "verb" = 1.0 if the ground-truth verb lands in the top-k of
    that sum. One score card per image is appended to self.score_cards. With
    self.write_to_file set, the top-5 ids/logits of the summed vector are
    logged in self.all_res and hits go to self.pass_list with 'found' = 0.

    verb_predict: assumed batch x n_rows x n_verbs tensor — TODO confirm.
    """
    batch_size = verb_predict.size()[0]
    for i in range(batch_size):
        verb_pred = verb_predict[i]
        gt_verb = gt_verbs[i]
        current_id = img_id[i]
        new_card = {"verb":0.0, "value":0.0, "value*":0.0, "n_value":0.0, "value-all":0.0, "value-all*":0.0}
        # Fixes: renamed `all` (shadowed the builtin), collapsed two identical
        # torch.sort calls into one, removed unused `logits`/`verbs` lists.
        summed = torch.sum(verb_pred, 0)
        sorted_logits, sorted_idx = torch.sort(summed, 0, True)
        if self.write_to_file:
            self.all_res[current_id] = {'gtv': gt_verb.item(), 'found': -1,
                                        'verbs': sorted_idx[:5].tolist(),
                                        'logits': sorted_logits[:5].tolist()}
        if torch.sum(sorted_idx[0:self.topk] == gt_verb) == 1:
            if self.write_to_file:
                self.pass_list.append(current_id)
                self.all_res[current_id]['found'] = 0
            new_card["verb"] += 1
        self.score_cards.append(new_card)
def add_point_multi_verb_avg(self, img_id, verb_predict, gt_verbs):
    """Score the FRACTION of candidate rows whose top-k contains the gt verb.

    Unlike add_point_multi_verb, every row is scored (the break is commented
    out) and the hit count is divided by the number of rows, so "verb" is the
    per-image hit rate in [0, 1]. One score card per image is appended to
    self.score_cards. With self.write_to_file set, each row's top-1 verb id
    and logit are accumulated in self.all_res and 'found' records the LAST
    row index that hit.
    """
    #encoded predictions should be batch x verbs x values #assumes the are the same order as the references
    #encoded reference should be batch x 1+ references*roles,values (sorted)
    batch_size = verb_predict.size()[0]
    for i in range(batch_size):
        verb_pred = verb_predict[i]
        gt_verb = gt_verbs[i]
        current_id = img_id[i]
        new_card = {"verb":0.0, "value":0.0, "value*":0.0, "n_value":0.0, "value-all":0.0, "value-all*":0.0}
        for r in range(0,verb_pred.size(0)):
            sorted_idx = torch.sort(verb_pred[r], 0, True)[1]
            verb_found = (torch.sum(sorted_idx[0:self.topk] == gt_verb) == 1)
            if self.write_to_file:
                # sorted_logits is only defined (and only used) on this branch.
                sorted_logits = torch.sort(verb_pred[r], 0, True)[0]
                cur_v = self.encoder.verb_list[sorted_idx[0]]  # NOTE(review): cur_v is computed but never used — confirm intent.
                if current_id not in self.all_res:
                    self.all_res[current_id] = {'gtv': gt_verb.item(),'found':-1, 'verbs':[sorted_idx[0].item()],
                                                'logits':[sorted_logits[0].item()]}
                else:
                    self.all_res[current_id]['verbs'].append(sorted_idx[0].item())
                    self.all_res[current_id]['logits'].append(sorted_logits[0].item())
            if verb_found:
                if self.write_to_file:
                    self.pass_list.append(current_id)
                    self.all_res[current_id]['found'] = r
                new_card["verb"] += 1
                #break
        # Normalize hits by the number of rows -> average accuracy for this image.
        new_card["verb"] /= verb_pred.size(0)
        self.score_cards.append(new_card)
def add_point_multi_verb_beam(self, img_id, verb_predict, gt_verbs):
    """Score beam-search output where verb_predict[i] already holds RANKED
    verb ids (no sorting needed): hit iff the gt verb id appears among the
    first top-k beam entries. One score card per image is appended to
    self.score_cards; hits are appended to self.pass_list when
    self.write_to_file is set.
    """
    for idx in range(verb_predict.size()[0]):
        beam_ids = verb_predict[idx]
        target = gt_verbs[idx]
        image = img_id[idx]
        card = {"verb": 0.0, "value": 0.0, "value*": 0.0,
                "n_value": 0.0, "value-all": 0.0, "value-all*": 0.0}
        if torch.sum(beam_ids[0:self.topk] == target) == 1:
            if self.write_to_file:
                self.pass_list.append(image)
            card["verb"] += 1
        self.score_cards.append(card)
def add_point_verb_beamdirect(self, _id, verb_predict, gt_verbs):
    """Score verbs from a flattened beam: prediction indices are reduced
    modulo the verb-vocabulary size, de-duplicated in rank order, and the
    first 5 unique verb ids form the candidate list; hit iff the gt verb is
    in its top-k. One score card per item is appended to self.score_cards.

    verb_predict[i]: assumed a flat score vector of length beam*n_verbs, so
    that index % n_verbs recovers the verb id — TODO confirm with caller.
    """
    #encoded predictions should be batch x verbs x values #assumes the are the same order as the references
    #encoded reference should be batch x 1+ references*roles,values (sorted)
    batch_size = verb_predict.size()[0]
    for i in range(batch_size):
        curr_id = _id[i]
        verb_pred = verb_predict[i]
        gt_verb = gt_verbs[i]
        #print('check sizes:', verb_pred.size(), gt_verb.size(), label_pred.size(), gt_label.size())
        sorted_idx_all = torch.sort(verb_pred, 0, True)[1]
        #print('before mod :', sorted_idx_all)
        n_verbs = len(self.encoder.verb_list)
        #print('n verbs :', n_verbs)
        # Map flattened beam positions back onto verb-vocabulary ids.
        sorted_idx_dup = torch.remainder(sorted_idx_all, n_verbs)
        #print('after mod :', sorted_idx)
        # Keep the first occurrence of each verb id, preserving rank order,
        # stopping once 5 unique candidates are collected.
        sorted_idx = []
        for q in sorted_idx_dup:
            if q.item() not in sorted_idx:
                sorted_idx.append(q.item())
            if len(sorted_idx) == 5:
                break
        sorted_idx = torch.tensor(sorted_idx, dtype=torch.long)
        if torch.cuda.is_available():
            # Move to GPU so the comparison below matches gt_verb's device
            # — NOTE(review): assumes gt_verb is on CUDA when available; confirm.
            sorted_idx = sorted_idx.to(torch.device('cuda'))
        gt_v = gt_verb
        new_card = {"verb":0.0, "value":0.0, "value*":0.0, "n_value":0.0, "value-all":0.0, "value-all*":0.0}
        # >= 1 (not == 1) since dedup already guarantees at most one match.
        verb_found = (torch.sum(sorted_idx[0:self.topk] == gt_v) >= 1)
        if verb_found: new_card["verb"] += 1
        self.score_cards.append(new_card)
def combine(self, rv, card):
    """Accumulate every entry of *card* into *rv* in place (keys must exist in rv)."""
    for key in card:
        rv[key] += card[key]
def get_average_results(self, groups=[]):
    """Average the "verb", "value" and "value-all" metrics over all
    accumulated score cards.

    Returns a dict with keys verb/value/value*/value-all/value-all* (the
    starred entries stay 0 here — see get_average_results_nouns for those).
    Fix: returns the zeroed dict instead of raising ZeroDivisionError when no
    score cards have been accumulated.

    groups: unused; kept (with its original mutable default, never mutated)
    for interface compatibility.
    """
    rv = {"verb": 0, "value": 0, "value*": 0, "value-all": 0, "value-all*": 0}
    total_len = len(self.score_cards)
    if total_len == 0:
        return rv
    for card in self.score_cards:
        rv["verb"] += card["verb"]
        rv["value-all"] += card["value-all"]
        rv["value"] += card["value"]
    rv["verb"] /= total_len
    rv["value-all"] /= total_len
    rv["value"] /= total_len
    return rv
def get_average_rot_results(self):
    """Average the "rot" metric over all accumulated score cards.

    Fix: returns {"rot": 0} instead of raising ZeroDivisionError when no
    score cards have been accumulated (consistent with get_average_results).
    NOTE(review): score cards built by the add_point_* methods in this file
    have no "rot" key — this accessor presumably pairs with a rot-scoring
    path elsewhere; confirm.
    """
    rv = {"rot": 0, }
    total_len = len(self.score_cards)
    if total_len == 0:
        return rv
    for card in self.score_cards:
        rv["rot"] += card["rot"]
    rv["rot"] /= total_len
    return rv
def get_average_results_nouns(self, groups=[]):
    """Average the noun metrics ("value*" and "value-all*") over all
    accumulated score cards; the unstarred entries stay 0 here (see
    get_average_results for those).

    Fix: returns the zeroed dict instead of raising ZeroDivisionError when no
    score cards have been accumulated (consistent with get_average_results).

    groups: unused; kept (with its original mutable default, never mutated)
    for interface compatibility.
    """
    rv = {"verb": 0, "value": 0, "value*": 0, "value-all": 0, "value-all*": 0}
    total_len = len(self.score_cards)
    if total_len == 0:
        return rv
    for card in self.score_cards:
        rv["value-all*"] += card["value-all*"]
        rv["value*"] += card["value*"]
    rv["value-all*"] /= total_len
    rv["value*"] /= total_len
    return rv
def rearrange_label_pred(self, pred):
    """Split a flat label-prediction vector into one slice per role, using the
    per-role [start, end) offsets published by self.encoder."""
    starts = self.encoder.role_start_idx
    ends = self.encoder.role_end_idx
    return [pred[starts[j]:ends[j]]
            for j in range(self.encoder.get_max_role_count())]
| 41.328778
| 131
| 0.542567
| 6,958
| 51,413
| 3.742598
| 0.026444
| 0.008832
| 0.020429
| 0.037211
| 0.911025
| 0.897969
| 0.885143
| 0.877232
| 0.869782
| 0.857148
| 0
| 0.01682
| 0.340848
| 51,413
| 1,243
| 132
| 41.362027
| 0.751601
| 0.171085
| 0
| 0.773684
| 0
| 0
| 0.043768
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038158
| false
| 0.009211
| 0.002632
| 0
| 0.048684
| 0.001316
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
80272ffd9b8458863e1a9047aaeab7f7125be983
| 2,287
|
py
|
Python
|
{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/pages/tests/test_views.py
|
erickgnavar/django_template
|
bdaa361151296da5670f7698f2ee146edf9dc867
|
[
"MIT"
] | null | null | null |
{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/pages/tests/test_views.py
|
erickgnavar/django_template
|
bdaa361151296da5670f7698f2ee146edf9dc867
|
[
"MIT"
] | null | null | null |
{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/pages/tests/test_views.py
|
erickgnavar/django_template
|
bdaa361151296da5670f7698f2ee146edf9dc867
|
[
"MIT"
] | null | null | null |
from django.test import RequestFactory, TestCase
from django.urls import resolve
from .. import views
class HomeViewTestCase(TestCase):
    """Routing and smoke tests for the home page view."""

    def setUp(self):
        self.factory = RequestFactory()
        self.view = views.HomeView.as_view()

    def test_match_expected_view(self):
        # "/" must resolve to HomeView.
        match = resolve("/")
        self.assertEqual(match.func.__name__, self.view.__name__)

    def test_load_sucessful(self):
        response = self.view(self.factory.get("/"))
        self.assertEqual(response.status_code, 200)
class ContactViewTestCase(TestCase):
    """Routing and smoke tests for the contact page view."""

    def setUp(self):
        self.factory = RequestFactory()
        self.view = views.contact

    def test_match_expected_view(self):
        # "/contact" must resolve to the contact view.
        match = resolve("/contact")
        self.assertEqual(match.func.__name__, self.view.__name__)

    def test_load_sucessful(self):
        response = self.view(self.factory.get("/"))
        self.assertEqual(response.status_code, 200)
class PrivacyViewTestCase(TestCase):
    """Routing and smoke tests for the privacy page view."""

    def setUp(self):
        self.factory = RequestFactory()
        self.view = views.privacy

    def test_match_expected_view(self):
        # "/privacy" must resolve to the privacy view.
        match = resolve("/privacy")
        self.assertEqual(match.func.__name__, self.view.__name__)

    def test_load_sucessful(self):
        response = self.view(self.factory.get("/"))
        self.assertEqual(response.status_code, 200)
class CookiesViewTestCase(TestCase):
    """Routing and smoke tests for the cookies page view."""

    def setUp(self):
        self.factory = RequestFactory()
        self.view = views.cookies

    def test_match_expected_view(self):
        # "/cookies" must resolve to the cookies view.
        match = resolve("/cookies")
        self.assertEqual(match.func.__name__, self.view.__name__)

    def test_load_sucessful(self):
        response = self.view(self.factory.get("/"))
        self.assertEqual(response.status_code, 200)
class FaqViewTestCase(TestCase):
    """Routing and smoke tests for the FAQ page view."""

    def setUp(self):
        self.factory = RequestFactory()
        self.view = views.faq

    def test_match_expected_view(self):
        # "/faq" must resolve to the faq view.
        match = resolve("/faq")
        self.assertEqual(match.func.__name__, self.view.__name__)

    def test_load_sucessful(self):
        response = self.view(self.factory.get("/"))
        self.assertEqual(response.status_code, 200)
| 28.5875
| 63
| 0.664626
| 260
| 2,287
| 5.573077
| 0.153846
| 0.082816
| 0.05521
| 0.069013
| 0.835059
| 0.835059
| 0.835059
| 0.721187
| 0.721187
| 0.721187
| 0
| 0.008436
| 0.222562
| 2,287
| 79
| 64
| 28.949367
| 0.806524
| 0
| 0
| 0.689655
| 0
| 0
| 0.014867
| 0
| 0
| 0
| 0
| 0
| 0.172414
| 1
| 0.258621
| false
| 0
| 0.051724
| 0
| 0.396552
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
33fce3917fd9977401f83bdda26a6e930b5c9664
| 23,972
|
py
|
Python
|
netbox_dns/tests/record/test_auto_ptr.py
|
peteeckel/netbox-dns
|
8e2e1b16ffbeb19606bc13e6d668cb27a300eb9c
|
[
"MIT"
] | null | null | null |
netbox_dns/tests/record/test_auto_ptr.py
|
peteeckel/netbox-dns
|
8e2e1b16ffbeb19606bc13e6d668cb27a300eb9c
|
[
"MIT"
] | null | null | null |
netbox_dns/tests/record/test_auto_ptr.py
|
peteeckel/netbox-dns
|
8e2e1b16ffbeb19606bc13e6d668cb27a300eb9c
|
[
"MIT"
] | null | null | null |
import ipaddress
from django.test import TestCase
from netbox_dns.models import NameServer, Record, RecordTypeChoices, Zone
def reverse_name(address, reverse_zone):
    """Return the PTR record name for *address* relative to *reverse_zone*.

    If the address's reverse pointer falls inside the zone, the zone suffix is
    stripped and the relative name is returned; otherwise the absolute
    reverse pointer (with trailing dot) is returned.
    """
    pointer = ipaddress.ip_address(address).reverse_pointer
    fqdn_suffix = f'{reverse_zone.name.rstrip(".")}.'
    if not pointer.endswith(reverse_zone.name):
        return f"{pointer}."
    return pointer[: -len(fqdn_suffix)]
class AutoPTRTest(TestCase):
    """Exercise automatic PTR record management for A/AAAA records:
    creation, deletion, renames, address/TTL changes, disable_ptr handling,
    parent/child reverse-zone interplay, and type changes."""

    zone_data = {
        "default_ttl": 86400,
        "soa_rname": "hostmaster.example.com",
        "soa_refresh": 172800,
        "soa_retry": 7200,
        "soa_expire": 2592000,
        "soa_ttl": 86400,
        "soa_minimum": 3600,
        "soa_serial": 1,
    }
    record_data = {
        "ttl": 86400,
    }

    @classmethod
    def setUpTestData(cls):
        cls.nameserver = NameServer.objects.create(name="ns1.example.com")
        # One forward zone followed by IPv4 then IPv6 reverse zones; tests
        # index into cls.zones positionally, so the order matters.
        zone_names = [
            "zone1.example.com",
            "1.0.10.in-addr.arpa",
            "2.0.10.in-addr.arpa",
            "1.1.10.in-addr.arpa",
            "0.10.in-addr.arpa",
            "2.10.in-addr.arpa",
            "1.0.0.0.f.e.e.b.d.a.e.d.0.8.e.f.ip6.arpa",
            "2.0.0.0.f.e.e.b.d.a.e.d.0.8.e.f.ip6.arpa",
            "1.1.0.0.f.e.e.b.d.a.e.d.0.8.e.f.ip6.arpa",
            "0.0.0.f.e.e.b.d.a.e.d.0.8.e.f.ip6.arpa",
            "2.0.0.f.e.e.b.d.a.e.d.0.8.e.f.ip6.arpa",
        ]
        cls.zones = [
            Zone(name=zone_name, **cls.zone_data, soa_mname=cls.nameserver)
            for zone_name in zone_names
        ]
        Zone.objects.bulk_create(cls.zones)

    # ------------------------------------------------------------------
    # Helpers (private; factored out of the original copy-pasted bodies).
    # ------------------------------------------------------------------

    def _save_forward(self, zone, name, value, rtype, disable_ptr=False):
        """Create and save an address record; disable_ptr is only passed
        through when set, mirroring the original per-test constructors."""
        extra = {"disable_ptr": True} if disable_ptr else {}
        record = Record(
            zone=zone, name=name, type=rtype, value=value,
            **extra, **self.record_data,
        )
        record.save()
        return record

    def _get_ptr(self, r_zone, address):
        """Fetch the single PTR record for *address* in *r_zone*."""
        return Record.objects.get(
            type=RecordTypeChoices.PTR, zone=r_zone, name=reverse_name(address, r_zone)
        )

    def _assert_ptr_value(self, r_zone, address, fqdn):
        self.assertEqual(self._get_ptr(r_zone, address).value, fqdn)

    def _assert_no_ptr(self, r_zone, address):
        with self.assertRaises(Record.DoesNotExist):
            Record.objects.get(
                type=RecordTypeChoices.PTR,
                zone=r_zone,
                name=reverse_name(address, r_zone),
            )

    def _make_reverse_zone(self, name):
        zone = Zone(name=name, **self.zone_data, soa_mname=self.nameserver)
        zone.save()
        return zone

    # ------------------------------ IPv4 ------------------------------

    def test_create_ipv4_ptr(self):
        f_zone, r_zone = self.zones[0], self.zones[1]
        self._save_forward(f_zone, "test1", "10.0.1.42", RecordTypeChoices.A)
        self._assert_ptr_value(r_zone, "10.0.1.42", f"test1.{f_zone.name}.")

    def test_remove_ipv4_ptr(self):
        f_zone, r_zone = self.zones[0], self.zones[1]
        record = self._save_forward(f_zone, "test1", "10.0.1.42", RecordTypeChoices.A)
        record.delete()
        self._assert_no_ptr(r_zone, "10.0.1.42")

    def test_create_multiple_ipv4(self):
        f_zone, r_zone = self.zones[0], self.zones[1]
        names = ["test1", "test2", "test3", "test4"]
        for name in names:
            self._save_forward(f_zone, name, "10.0.1.42", RecordTypeChoices.A)
        ptr_records = Record.objects.filter(
            type=RecordTypeChoices.PTR,
            zone=r_zone,
            name=reverse_name("10.0.1.42", r_zone),
        )
        expected = [f"{name}.{f_zone.name}." for name in names]
        for ptr_record in ptr_records:
            self.assertTrue(ptr_record.value in expected)

    def test_create_duplicate_ipv4_disable_ptr_1(self):
        f_zone, r_zone = self.zones[0], self.zones[1]
        self._save_forward(f_zone, "test1", "10.0.1.42", RecordTypeChoices.A, disable_ptr=True)
        self._save_forward(f_zone, "test2", "10.0.1.42", RecordTypeChoices.A)
        # Only the record without disable_ptr contributes the PTR.
        self._assert_ptr_value(r_zone, "10.0.1.42", f"test2.{f_zone.name}.")

    def test_create_duplicate_ipv4_disable_ptr_2(self):
        f_zone, r_zone = self.zones[0], self.zones[1]
        self._save_forward(f_zone, "test1", "10.0.1.42", RecordTypeChoices.A)
        self._save_forward(f_zone, "test2", "10.0.1.42", RecordTypeChoices.A, disable_ptr=True)
        self._assert_ptr_value(r_zone, "10.0.1.42", f"test1.{f_zone.name}.")

    def test_change_name_ipv4(self):
        f_zone, r_zone = self.zones[0], self.zones[1]
        record = self._save_forward(f_zone, "test1", "10.0.1.42", RecordTypeChoices.A)
        record.name = "test2"
        record.save()
        self._assert_ptr_value(r_zone, "10.0.1.42", f"test2.{f_zone.name}.")

    def test_change_address_within_zone_ipv4(self):
        f_zone, r_zone = self.zones[0], self.zones[1]
        record = self._save_forward(f_zone, "test1", "10.0.1.23", RecordTypeChoices.A)
        record.value = "10.0.1.42"
        record.save()
        self._assert_ptr_value(r_zone, "10.0.1.42", f"test1.{f_zone.name}.")

    def test_change_address_outside_zone_ipv4_old_zone(self):
        f_zone, r_zone = self.zones[0], self.zones[1]
        record = self._save_forward(f_zone, "test1", "10.0.1.23", RecordTypeChoices.A)
        record.value = "10.0.2.42"
        record.save()
        # The PTR in the old reverse zone must be gone.
        self._assert_no_ptr(r_zone, "10.0.1.23")

    def test_change_address_outside_zone_ipv4_new_zone(self):
        f_zone, r_zone = self.zones[0], self.zones[2]
        record = self._save_forward(f_zone, "test1", "10.0.1.23", RecordTypeChoices.A)
        record.value = "10.0.2.42"
        record.save()
        self._assert_ptr_value(r_zone, "10.0.2.42", f"test1.{f_zone.name}.")

    def test_change_address_outside_zone_ipv4_no_zone(self):
        f_zone = self.zones[0]
        record = self._save_forward(f_zone, "test1", "10.0.1.23", RecordTypeChoices.A)
        record.value = "10.3.1.23"
        record.save()
        # No reverse zone covers the new address: no PTR anywhere.
        with self.assertRaises(Record.DoesNotExist):
            Record.objects.get(
                type=RecordTypeChoices.PTR, value=f"test1.{f_zone.name}."
            )

    def test_change_ttl_ipv4(self):
        f_zone, r_zone = self.zones[0], self.zones[1]
        record = self._save_forward(f_zone, "test1", "10.0.1.42", RecordTypeChoices.A)
        record.ttl = 98765
        record.save()
        self.assertEqual(self._get_ptr(r_zone, "10.0.1.42").ttl, 98765)

    def test_ipv4_delete_ptr_zone_with_parent(self):
        f_zone = self.zones[0]
        child_zone, parent_zone = self.zones[1], self.zones[4]
        self._save_forward(f_zone, "test1", "10.0.1.42", RecordTypeChoices.A)
        # Deleting the child reverse zone moves the PTR to the parent.
        child_zone.delete()
        self._assert_ptr_value(parent_zone, "10.0.1.42", f"test1.{f_zone.name}.")

    def test_ipv4_create_ptr_zone_with_parent(self):
        f_zone = self.zones[0]
        self._save_forward(f_zone, "test1", "10.2.1.42", RecordTypeChoices.A)
        r_zone = self._make_reverse_zone("1.2.10.in-addr.arpa")
        self._assert_ptr_value(r_zone, "10.2.1.42", f"test1.{f_zone.name}.")

    def test_ipv4_create_ptr_zone_without_parent(self):
        f_zone = self.zones[0]
        self._save_forward(f_zone, "test1", "10.3.1.42", RecordTypeChoices.A)
        r_zone = self._make_reverse_zone("1.3.10.in-addr.arpa")
        self._assert_ptr_value(r_zone, "10.3.1.42", f"test1.{f_zone.name}.")

    # ------------------------------ IPv6 ------------------------------

    def test_create_ipv6_ptr(self):
        f_zone, r_zone = self.zones[0], self.zones[6]
        self._save_forward(f_zone, "test1", "fe80:dead:beef:1::42", RecordTypeChoices.AAAA)
        self._assert_ptr_value(r_zone, "fe80:dead:beef:1::42", f"test1.{f_zone.name}.")

    def test_remove_ipv6_ptr(self):
        f_zone, r_zone = self.zones[0], self.zones[6]
        record = self._save_forward(f_zone, "test1", "fe80:dead:beef:1::42", RecordTypeChoices.AAAA)
        record.delete()
        self._assert_no_ptr(r_zone, "fe80:dead:beef:1::42")

    def test_create_multiple_ipv6(self):
        f_zone, r_zone = self.zones[0], self.zones[6]
        names = ["test1", "test2", "test3", "test4"]
        for name in names:
            self._save_forward(f_zone, name, "fe80:dead:beef:1::42", RecordTypeChoices.AAAA)
        ptr_records = Record.objects.filter(
            type=RecordTypeChoices.PTR,
            zone=r_zone,
            name=reverse_name("fe80:dead:beef:1::42", r_zone),
        )
        expected = [f"{name}.{f_zone.name}." for name in names]
        for ptr_record in ptr_records:
            self.assertTrue(ptr_record.value in expected)

    def test_create_duplicate_ipv6_disable_ptr_1(self):
        f_zone, r_zone = self.zones[0], self.zones[6]
        self._save_forward(f_zone, "test1", "fe80:dead:beef:1::42", RecordTypeChoices.AAAA, disable_ptr=True)
        self._save_forward(f_zone, "test2", "fe80:dead:beef:1::42", RecordTypeChoices.AAAA)
        self._assert_ptr_value(r_zone, "fe80:dead:beef:1::42", f"test2.{f_zone.name}.")

    def test_create_duplicate_ipv6_disable_ptr_2(self):
        f_zone, r_zone = self.zones[0], self.zones[6]
        self._save_forward(f_zone, "test1", "fe80:dead:beef:1::42", RecordTypeChoices.AAAA)
        self._save_forward(f_zone, "test2", "fe80:dead:beef:1::42", RecordTypeChoices.AAAA, disable_ptr=True)
        self._assert_ptr_value(r_zone, "fe80:dead:beef:1::42", f"test1.{f_zone.name}.")

    def test_change_name_ipv6(self):
        f_zone, r_zone = self.zones[0], self.zones[6]
        record = self._save_forward(f_zone, "test1", "fe80:dead:beef:1::42", RecordTypeChoices.AAAA)
        record.name = "test2"
        record.save()
        self._assert_ptr_value(r_zone, "fe80:dead:beef:1::42", f"test2.{f_zone.name}.")

    def test_change_address_within_zone_ipv6(self):
        f_zone, r_zone = self.zones[0], self.zones[6]
        record = self._save_forward(f_zone, "test1", "fe80:dead:beef:1::23", RecordTypeChoices.AAAA)
        record.value = "fe80:dead:beef:1::42"
        record.save()
        self._assert_ptr_value(r_zone, "fe80:dead:beef:1::42", f"test1.{f_zone.name}.")

    def test_change_address_outside_zone_ipv6_old_zone(self):
        f_zone, r_zone = self.zones[0], self.zones[6]
        record = self._save_forward(f_zone, "test1", "fe80:dead:beef:1::23", RecordTypeChoices.AAAA)
        record.value = "fe80:dead:beef:2::42"
        record.save()
        self._assert_no_ptr(r_zone, "fe80:dead:beef:1::23")

    def test_change_address_outside_zone_ipv6_new_zone(self):
        f_zone, r_zone = self.zones[0], self.zones[7]
        record = self._save_forward(f_zone, "test1", "fe80:dead:beef:1::23", RecordTypeChoices.AAAA)
        record.value = "fe80:dead:beef:2::42"
        record.save()
        self._assert_ptr_value(r_zone, "fe80:dead:beef:2::42", f"test1.{f_zone.name}.")

    def test_change_address_outside_zone_ipv6_no_zone(self):
        f_zone = self.zones[0]
        record = self._save_forward(f_zone, "test1", "fe80:dead:beef:1::23", RecordTypeChoices.AAAA)
        record.value = "fe80:dead:beef:31::42"
        record.save()
        with self.assertRaises(Record.DoesNotExist):
            Record.objects.get(
                type=RecordTypeChoices.PTR, value=f"test1.{f_zone.name}."
            )

    def test_change_ttl_ipv6(self):
        f_zone, r_zone = self.zones[0], self.zones[6]
        record = self._save_forward(f_zone, "test1", "fe80:dead:beef:1::42", RecordTypeChoices.AAAA)
        record.ttl = 98765
        record.save()
        self.assertEqual(self._get_ptr(r_zone, "fe80:dead:beef:1::42").ttl, 98765)

    def test_ipv6_delete_ptr_zone_with_parent(self):
        f_zone = self.zones[0]
        child_zone, parent_zone = self.zones[6], self.zones[9]
        self._save_forward(f_zone, "test1", "fe80:dead:beef:1::42", RecordTypeChoices.AAAA)
        child_zone.delete()
        self._assert_ptr_value(parent_zone, "fe80:dead:beef:1::42", f"test1.{f_zone.name}.")

    def test_ipv6_create_ptr_zone_with_parent(self):
        f_zone = self.zones[0]
        self._save_forward(f_zone, "test1", "fe80:dead:beef:21::42", RecordTypeChoices.AAAA)
        r_zone = self._make_reverse_zone("1.2.0.0.f.e.e.b.d.a.e.d.0.8.e.f.ip6.arpa")
        self._assert_ptr_value(r_zone, "fe80:dead:beef:21::42", f"test1.{f_zone.name}.")

    def test_ipv6_create_ptr_zone_without_parent(self):
        f_zone = self.zones[0]
        self._save_forward(f_zone, "test1", "fe80:dead:beef:31::42", RecordTypeChoices.AAAA)
        r_zone = self._make_reverse_zone("1.3.0.0.f.e.e.b.d.a.e.d.0.8.e.f.ip6.arpa")
        self._assert_ptr_value(r_zone, "fe80:dead:beef:31::42", f"test1.{f_zone.name}.")

    # --------------------------- type changes -------------------------

    def test_ipv4_remove_ptr_on_type_change(self):
        f_zone, r_zone = self.zones[0], self.zones[1]
        record = self._save_forward(f_zone, "name1", "10.0.1.1", RecordTypeChoices.A)
        self._assert_ptr_value(r_zone, "10.0.1.1", f"name1.{f_zone.name}.")
        # Converting the record to CNAME must drop its PTR.
        record.type = RecordTypeChoices.CNAME
        record.value = "name2"
        record.save()
        self._assert_no_ptr(r_zone, "10.0.1.1")

    def test_ipv6_remove_ptr_on_type_change(self):
        f_zone, r_zone = self.zones[0], self.zones[6]
        record = self._save_forward(f_zone, "name1", "fe80:dead:beef:1::42", RecordTypeChoices.AAAA)
        self._assert_ptr_value(r_zone, "fe80:dead:beef:1::42", f"name1.{f_zone.name}.")
        record.type = RecordTypeChoices.CNAME
        record.value = "name2"
        record.save()
        self._assert_no_ptr(r_zone, "fe80:dead:beef:1::42")
| 26.844345
| 88
| 0.534165
| 2,853
| 23,972
| 4.273046
| 0.044515
| 0.068903
| 0.042818
| 0.041834
| 0.942909
| 0.939628
| 0.932901
| 0.930933
| 0.930933
| 0.92962
| 0
| 0.037313
| 0.350451
| 23,972
| 892
| 89
| 26.874439
| 0.745617
| 0
| 0
| 0.773876
| 0
| 0.009831
| 0.078216
| 0.037752
| 0
| 0
| 0
| 0
| 0.044944
| 1
| 0.044944
| false
| 0
| 0.004213
| 0
| 0.05618
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d5149e8b65d1f6481cae80866e142e4544da3aa6
| 38
|
py
|
Python
|
models/__init__.py
|
yuanl15/photometric_optimization
|
7ac88b564e21633722fae945063ba9b2e6a70e66
|
[
"MIT"
] | 248
|
2020-04-27T10:29:37.000Z
|
2022-03-17T06:54:30.000Z
|
models/__init__.py
|
yuanl15/photometric_optimization
|
7ac88b564e21633722fae945063ba9b2e6a70e66
|
[
"MIT"
] | 12
|
2020-06-24T01:51:36.000Z
|
2022-02-23T01:33:10.000Z
|
models/__init__.py
|
yuanl15/photometric_optimization
|
7ac88b564e21633722fae945063ba9b2e6a70e66
|
[
"MIT"
] | 42
|
2020-05-10T14:59:15.000Z
|
2022-03-24T05:53:44.000Z
|
from . import FLAME
from . import lbs
| 12.666667
| 19
| 0.736842
| 6
| 38
| 4.666667
| 0.666667
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.210526
| 38
| 2
| 20
| 19
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d5240dd8a91c1167b75387cdd3c13a79c78f859d
| 144
|
py
|
Python
|
test11mar.py
|
vijayjoset/PythonFCSITM
|
bddfc086c299a162594fe023627f6381f8d4c976
|
[
"MIT"
] | null | null | null |
test11mar.py
|
vijayjoset/PythonFCSITM
|
bddfc086c299a162594fe023627f6381f8d4c976
|
[
"MIT"
] | null | null | null |
test11mar.py
|
vijayjoset/PythonFCSITM
|
bddfc086c299a162594fe023627f6381f8d4c976
|
[
"MIT"
] | null | null | null |
# Demo: print ANSI-colored text via colorama's Fore color codes.
#from colorama import init
#init()
# NOTE(review): on Windows, colorama.init() (commented out above) is normally
# required for the color codes to render — confirm the target platform.
from colorama import Fore
print(Fore.BLUE + "Something in Blue")
print(Fore.RED + "Something in Red")
| 20.571429
| 39
| 0.708333
| 21
| 144
| 4.857143
| 0.47619
| 0.235294
| 0.352941
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.180556
| 144
| 6
| 40
| 24
| 0.864407
| 0.215278
| 0
| 0
| 0
| 0
| 0.317308
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
d5a8aae0e2b53cd79eb5fa513816fddc589239ab
| 79,152
|
py
|
Python
|
post_optimization_studies/mad_analyses/pre_select_two_signal/Output/Histos/MadAnalysis5job_0/selection_11.py
|
sheride/axion_pheno
|
7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5
|
[
"MIT"
] | null | null | null |
post_optimization_studies/mad_analyses/pre_select_two_signal/Output/Histos/MadAnalysis5job_0/selection_11.py
|
sheride/axion_pheno
|
7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5
|
[
"MIT"
] | null | null | null |
post_optimization_studies/mad_analyses/pre_select_two_signal/Output/Histos/MadAnalysis5job_0/selection_11.py
|
sheride/axion_pheno
|
7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5
|
[
"MIT"
] | null | null | null |
def selection_11():
# Library import
import numpy
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
# Library version
matplotlib_version = matplotlib.__version__
numpy_version = numpy.__version__
# Histo binning
xBinning = numpy.linspace(0.0,2000.0,401,endpoint=True)
# Creating data sequence: middle of each bin
xData = numpy.array([2.5,7.5,12.5,17.5,22.5,27.5,32.5,37.5,42.5,47.5,52.5,57.5,62.5,67.5,72.5,77.5,82.5,87.5,92.5,97.5,102.5,107.5,112.5,117.5,122.5,127.5,132.5,137.5,142.5,147.5,152.5,157.5,162.5,167.5,172.5,177.5,182.5,187.5,192.5,197.5,202.5,207.5,212.5,217.5,222.5,227.5,232.5,237.5,242.5,247.5,252.5,257.5,262.5,267.5,272.5,277.5,282.5,287.5,292.5,297.5,302.5,307.5,312.5,317.5,322.5,327.5,332.5,337.5,342.5,347.5,352.5,357.5,362.5,367.5,372.5,377.5,382.5,387.5,392.5,397.5,402.5,407.5,412.5,417.5,422.5,427.5,432.5,437.5,442.5,447.5,452.5,457.5,462.5,467.5,472.5,477.5,482.5,487.5,492.5,497.5,502.5,507.5,512.5,517.5,522.5,527.5,532.5,537.5,542.5,547.5,552.5,557.5,562.5,567.5,572.5,577.5,582.5,587.5,592.5,597.5,602.5,607.5,612.5,617.5,622.5,627.5,632.5,637.5,642.5,647.5,652.5,657.5,662.5,667.5,672.5,677.5,682.5,687.5,692.5,697.5,702.5,707.5,712.5,717.5,722.5,727.5,732.5,737.5,742.5,747.5,752.5,757.5,762.5,767.5,772.5,777.5,782.5,787.5,792.5,797.5,802.5,807.5,812.5,817.5,822.5,827.5,832.5,837.5,842.5,847.5,852.5,857.5,862.5,867.5,872.5,877.5,882.5,887.5,892.5,897.5,902.5,907.5,912.5,917.5,922.5,927.5,932.5,937.5,942.5,947.5,952.5,957.5,962.5,967.5,972.5,977.5,982.5,987.5,992.5,997.5,1002.5,1007.5,1012.5,1017.5,1022.5,1027.5,1032.5,1037.5,1042.5,1047.5,1052.5,1057.5,1062.5,1067.5,1072.5,1077.5,1082.5,1087.5,1092.5,1097.5,1102.5,1107.5,1112.5,1117.5,1122.5,1127.5,1132.5,1137.5,1142.5,1147.5,1152.5,1157.5,1162.5,1167.5,1172.5,1177.5,1182.5,1187.5,1192.5,1197.5,1202.5,1207.5,1212.5,1217.5,1222.5,1227.5,1232.5,1237.5,1242.5,1247.5,1252.5,1257.5,1262.5,1267.5,1272.5,1277.5,1282.5,1287.5,1292.5,1297.5,1302.5,1307.5,1312.5,1317.5,1322.5,1327.5,1332.5,1337.5,1342.5,1347.5,1352.5,1357.5,1362.5,1367.5,1372.5,1377.5,1382.5,1387.5,1392.5,1397.5,1402.5,1407.5,1412.5,1417.5,1422.5,1427.5,1432.5,1437.5,1442.5,1447.5,1452.5,1457.5,1462.5,1467.5,1472.5,1477.5,1482.5,1487.5,1492.5,1497.5,1502.5,1507.5,1512.5,1517.5,1522.5,1527.5,1532.5,1537.5,1542.5,1547.5,1552.5,1557.5,1562.5,1567.5,157
2.5,1577.5,1582.5,1587.5,1592.5,1597.5,1602.5,1607.5,1612.5,1617.5,1622.5,1627.5,1632.5,1637.5,1642.5,1647.5,1652.5,1657.5,1662.5,1667.5,1672.5,1677.5,1682.5,1687.5,1692.5,1697.5,1702.5,1707.5,1712.5,1717.5,1722.5,1727.5,1732.5,1737.5,1742.5,1747.5,1752.5,1757.5,1762.5,1767.5,1772.5,1777.5,1782.5,1787.5,1792.5,1797.5,1802.5,1807.5,1812.5,1817.5,1822.5,1827.5,1832.5,1837.5,1842.5,1847.5,1852.5,1857.5,1862.5,1867.5,1872.5,1877.5,1882.5,1887.5,1892.5,1897.5,1902.5,1907.5,1912.5,1917.5,1922.5,1927.5,1932.5,1937.5,1942.5,1947.5,1952.5,1957.5,1962.5,1967.5,1972.5,1977.5,1982.5,1987.5,1992.5,1997.5])
# Creating weights for histo: y12_PT_0
y12_PT_0_weights = numpy.array([5.30593134979,12.8881767046,18.4806958032,23.4754754257,27.5982198124,30.6196531644,32.891871173,36.2694882128,37.9930987022,39.7699291449,40.9613241008,42.0953631069,43.8681215532,44.2857211872,46.0093196766,46.0215996659,46.7175990559,47.0041588047,47.3235185248,47.3357985141,47.8925980261,47.9499179758,47.7083581876,48.3511176242,47.1474786791,48.0031179292,46.6479991169,46.4883192568,47.0410387724,45.9274397484,45.5630400677,45.1618404194,44.7442407854,45.1700404122,43.2007621381,44.4986010006,43.4014019622,43.3440820125,41.8947632827,42.0094031822,41.0882439895,40.7361242981,39.8559050696,39.1230657119,39.7576491557,38.3247184115,37.9930987022,37.256163348,37.6860429713,36.7239318145,35.7863886362,36.1507603168,34.541785727,33.9645182329,34.562257709,33.2562428536,32.9532831192,32.0812398834,31.7373401848,30.9553688702,30.8038890029,29.4446501942,29.1990064095,29.4855941583,29.0434305458,27.9994394608,28.2082392778,26.5214767561,27.0864602609,26.5296647489,25.1949939187,25.5184256352,25.645341524,24.2779187224,23.7088392212,23.7743471637,23.438631458,22.5788722115,22.742636068,22.7057881002,21.8214688753,21.2851413453,20.8716377077,20.5113620235,20.3435021706,19.8399306119,19.512402899,18.9392314013,18.9597033834,18.1040401333,18.0835681512,17.9198042948,17.8297363737,17.584088589,17.0846130267,16.003773974,16.0201499597,15.8686700924,15.864574096,15.4306024763,15.3732865266,14.9843468675,14.7755470504,14.4111753698,13.7028999905,14.0508956855,13.4859121807,12.7612608158,13.0355645754,12.474673067,12.5852129701,12.3272851961,12.3231931997,11.9055975657,11.5535058743,11.2055061793,11.3365180645,10.9352984161,10.8001945345,10.7264985991,10.0345992055,10.4481028431,10.2352110297,10.1001031481,9.76848343873,9.72754347461,9.10524402,9.21578392312,8.7940922927,8.63442443264,8.896444203,8.78999629629,8.33964869098,7.95070903186,8.33146069816,7.81560515027,7.80741715744,7.49217343373,7.59452534402,7.17692971001,7.28746961313,7.2219656705
4,6.66926215494,6.94356591454,6.67745014776,6.63651018365,6.49321830923,6.46865433076,6.0674306824,6.08380666804,5.90367082592,5.99373874698,6.32535845634,5.82178689768,5.63755505915,5.56795512015,5.29365136055,5.40828326008,4.88833571578,5.40009526726,5.18311145743,5.01115960813,4.94974766195,4.63450393824,4.64678392748,4.51577604229,4.53624402436,4.40932813559,4.40523613917,4.39295214994,4.27422425399,4.32335221094,4.27422425399,4.12274438675,4.11864839034,3.76655709892,3.63964041015,3.77065109533,3.61098203527,3.68058157427,3.66011119221,3.49634773574,3.32030189003,3.53728849986,3.30392550438,3.20976158691,3.16063282997,3.46359496444,2.90679945243,2.9313642309,2.83720031343,2.80444754213,2.71847161748,2.89861145961,2.97230499502,2.80854153854,2.60793131436,2.82491792419,2.52195578971,2.49739101124,2.61611970719,2.44007386148,2.60793131436,2.22308765165,2.32543956194,2.39503910095,2.31315757271,2.09617096288,2.10026495929,1.99791304899,2.14530011982,1.90784312793,2.10026495929,2.137111727,1.83005559611,2.01838343105,1.87509035664,2.00200704541,1.79730282481,1.83414959252,1.78092643916,1.64172776116,1.78502043558,1.67857452887,1.66219814322,1.56394022934,1.5189050688,1.71542129657,1.60488099345,1.4820583011,1.57622221857,1.42474115133,1.5189050688,1.35923600874,1.45749392263,1.42474115133,1.23231931997,1.42064715492,1.22413092715,1.28144807692,1.21184893791,1.21594293433,1.31420084821,1.14224939891,1.09312024197,1.32648323745,1.2405073128,1.08493224915,0.990768331673,0.978485942438,0.970297949614,1.04808548144,1.07264985991,1.06446186709,1.01123871373,0.949827567554,0.953921563966,1.00714471732,0.900698410612,0.978485942438,0.941639174731,0.900698410612,0.773781721844,0.921168792671,0.753311339785,0.704182582842,0.773781721844,0.745123346961,0.70008818643,0.806534493139,0.769687725432,0.761499732608,0.708276579254,0.794252103903,0.597736276133,0.642771036664,0.695994190018,0.724652964901,0.597736276133,0.630489047428,0.679617804371,0.618206658193,0.667335815135,0.5
15854747896,0.548607119191,0.638677040252,0.646865433076,0.552701515603,0.577265894074,0.585453886898,0.540419126367,0.552701515603,0.433972819659,0.474913583777,0.499478362248,0.585453886898,0.552701515603,0.466725590953,0.466725590953,0.45853719813,0.425784826835,0.507666355072,0.483101976601,0.405314444775,0.397126131952,0.417596434011,0.364373480657,0.433972819659,0.360279364245,0.397126131952,0.425784826835,0.388937979128,0.331620789362,0.323432636538,0.413502437599,0.30705629089,0.347997135009,0.319338560126,0.298868138067,0.323432636538,0.331620789362,0.343903058597,0.30705629089,0.331620789362,0.266115446772,0.257927293948,0.32752671295,0.26202137036,0.270209523184,0.294774021655,0.323432636538,0.286585868831,0.245645024712,0.245645024712,0.245645024712,0.266115446772,0.278397716007,0.225174602653,0.245645024712,0.200610104182,0.278397716007,0.249739101124,0.176045605711,0.225174602653,0.216986449829,0.180139682122,0.225174602653,0.229268679065,0.216986449829,0.200610104182,0.200610104182,0.167857452887,0.176045605711,0.212892373417,0.204704180594,0.167857452887,0.151481107239,0.200610104182,0.192421951358,0.212892373417,0.208798297005,0.200610104182,0.118728415944,0.151481107239,0.163763336475,0.159669260063,0.135104761592,0.139198838004,0.167857452887,0.122822532356,0.114634339532,0.135104761592,0.114634339532,0.102352110297,0.135104761592,0.167857452887,0.139198838004,0.122822532356,0.139198838004,0.110540263121,0.102352110297,0.118728415944,0.106446186709,0.114634339532,0.143292954416,0.0941639174731,0.098257993885,0.0614112661781,0.135104761592,0.0941639174731,0.0736934954137,0.0900698410612,0.147387030827,0.102352110297,0.0695994190018,0.106446186709,0.0777876118256,0.122822532356,0.106446186709,0.0736934954137])
# Creating weights for histo: y12_PT_1
y12_PT_1_weights = numpy.array([0.109030584056,0.235422731942,0.324313951554,0.41598289178,0.47848450557,0.516680113997,0.554875322424,0.627794138512,0.615293735754,0.677795349544,0.693073352915,0.719462958737,0.706962555979,0.757658567165,0.693073352915,0.72432415981,0.721546559197,0.798631776205,0.714601757665,0.735435762262,0.720157358891,0.723629759657,0.727102160423,0.734741362108,0.72640776027,0.779186971915,0.715296157818,0.70487935552,0.720852159044,0.672934148472,0.702101354907,0.694462553222,0.697240153834,0.698629354141,0.664600546633,0.654183744335,0.607654534069,0.661128145867,0.631960939432,0.636822140504,0.628488538666,0.617376936214,0.605571333609,0.613210135295,0.595848931464,0.611126934835,0.568070125335,0.568070125335,0.569459325642,0.534041717827,0.546541720585,0.529874916908,0.486818107408,0.505568511545,0.484734906949,0.521541315069,0.47640130511,0.490984908328,0.464595302505,0.514596513537,0.464595302505,0.471540104037,0.467373303118,0.459734101433,0.451400499594,0.402093688715,0.461817301892,0.431261295151,0.43334449561,0.41598289178,0.420844092852,0.404177289175,0.384037684732,0.397232487643,0.354870278296,0.379176483659,0.405566089481,0.377787523353,0.360425999522,0.357648118909,0.345842276304,0.311119148643,0.343064435691,0.366676160901,0.315285909563,0.314591469409,0.31250806895,0.306952387724,0.315980389716,0.320147150635,0.320147150635,0.315285909563,0.26736801899,0.301396666498,0.262506777918,0.287507423434,0.256951096692,0.273618180369,0.297229905579,0.255562176386,0.261117857611,0.250700935313,0.258340016998,0.26945141945,0.263895698224,0.239589532861,0.211116566579,0.2479230547,0.223616889337,0.237506132402,0.231255971023,0.249312015007,0.211811046733,0.229867050716,0.197921763668,0.208338725966,0.218755648265,0.191671602289,0.216672247805,0.213199967039,0.202783004741,0.192366082442,0.175004518612,0.182643600297,0.188199321523,0.184032520604,0.169448837386,0.165282036467,0.172921118152,0.16597651662,0.177087919071,0.158337434935,0.
156254034475,0.172226677999,0.155559594322,0.170143277539,0.147920472636,0.149309432943,0.145837112177,0.151392793402,0.153476193862,0.163198676007,0.133336789419,0.12708662804,0.160420795394,0.137503550338,0.12291982712,0.129169988499,0.12500322758,0.12291982712,0.138892470644,0.12500322758,0.12500322758,0.111113984515,0.0951413409913,0.122225386967,0.12708662804,0.112502904822,0.115280745435,0.109030584056,0.104863823136,0.10555830329,0.115975225588,0.100002582064,0.106947223596,0.106947223596,0.107641663749,0.0993081419107,0.097224741451,0.0937524206849,0.110419504362,0.0979191816042,0.0930579405317,0.0895856597656,0.0916690202252,0.097224741451,0.0888911796124,0.0763908568544,0.0750019365479,0.0756964167012,0.0888911796124,0.084724418693,0.0638905340964,0.0763908568544,0.084724418693,0.0666683747093,0.0694462553222,0.0812520979269,0.0743074563947,0.0701406954754,0.0763908568544,0.0750019365479,0.0666683747093,0.0569459325642,0.0756964167012,0.0729185360883,0.0631960939432,0.0583348528706,0.0590292930238,0.0569459325642,0.0604182133303,0.0597237731771,0.0576403727174,0.0611126934835,0.072224095935,0.0729185360883,0.0625016137899,0.0562514524109,0.0625016137899,0.0486123707255,0.0534736117981,0.0493068108787,0.0604182133303,0.0493068108787,0.0458345301126,0.0666683747093,0.0451400499594,0.0506957711852,0.0444456098062,0.0458345301126,0.0458345301126,0.0493068108787,0.0493068108787,0.0368065081207,0.0430566494997,0.0347231196611,0.0472234504191,0.0513902113384,0.043751129653,0.0423622093465,0.0402788088868,0.0465289702658,0.0500012910319,0.0458345301126,0.0416677291933,0.0423622093465,0.0354175838143,0.0368065081207,0.0284729582821,0.0361120439675,0.0305563467417,0.0368065081207,0.031250806895,0.0395843567336,0.031250806895,0.0347231196611,0.0368065081207,0.0319452710482,0.0277784941289,0.0326397312014,0.0354175838143,0.0298618825885,0.037500968274,0.0277784941289,0.031250806895,0.0354175838143,0.0284729582821,0.0319452710482,0.0215283327499,0.0347231196611,0.02708
40339756,0.0243061853628,0.0277784941289,0.0270840339756,0.0277784941289,0.0354175838143,0.0173615598305,0.0243061853628,0.0256951096692,0.0270840339756,0.0201394084434,0.0201394084434,0.0277784941289,0.025000645516,0.0222227969031,0.0236117212095,0.0243061853628,0.0194449482902,0.0180560239838,0.0215283327499,0.0180560239838,0.018750484137,0.0173615598305,0.0131947869112,0.0277784941289,0.0256951096692,0.0104169342983,0.0208338725966,0.0152781713709,0.0201394084434,0.012500322758,0.018750484137,0.0152781713709,0.0201394084434,0.012500322758,0.018750484137,0.0215283327499,0.018750484137,0.0208338725966,0.0138892470644,0.0173615598305,0.0104169342983,0.0145837112177,0.0118058626048,0.0111113984515,0.0138892470644,0.0180560239838,0.00763908568544,0.0201394084434,0.0111113984515,0.0138892470644,0.0166670956773,0.0111113984515,0.00833354983866,0.00833354983866,0.0097224741451,0.0138892470644,0.0131947869112,0.0097224741451,0.0138892470644,0.0152781713709,0.0097224741451,0.0118058626048,0.012500322758,0.0152781713709,0.00833354983866,0.0118058626048,0.00902800999188,0.0152781713709,0.0097224741451,0.0138892470644,0.0104169342983,0.0131947869112,0.00625016137899,0.0118058626048,0.00416677291933,0.0104169342983,0.00694462553222,0.00763908568544,0.00763908568544,0.0104169342983,0.00555569722577,0.0138892470644,0.00486123707255,0.00763908568544,0.00694462553222,0.00625016137899,0.00486123707255,0.00347231196611,0.00277784941289,0.00763908568544,0.012500322758,0.00555569722577,0.00625016137899,0.00833354983866,0.00902800999188,0.00763908568544,0.00625016137899,0.012500322758,0.00902800999188,0.00347231196611,0.00277784941289,0.00625016137899,0.00625016137899,0.00555569722577,0.00763908568544,0.00694462553222,0.00694462553222,0.00486123707255,0.00416677291933,0.00486123707255,0.00486123707255,0.00486123707255,0.00416677291933,0.00763908568544,0.00486123707255,0.00486123707255,0.00416677291933,0.00763908568544,0.00555569722577,0.00694462553222,0.00347231196611,0.00555569722577,
0.00277784941289,0.00138892470644,0.00208338725966,0.00833354983866,0.00555569722577,0.00416677291933,0.000694462553222,0.00694462553222,0.00763908568544,0.00416677291933,0.00138892470644,0.00763908568544,0.00486123707255,0.00416677291933,0.00416677291933,0.00277784941289,0.00277784941289])
# Creating weights for histo: y12_PT_2
y12_PT_2_weights = numpy.array([0.0,0.0,7262.64321065,2479.70032779,1074.01177458,533.301260254,289.887696415,168.389308529,102.818391386,65.2974112405,43.8392549014,30.0127023572,21.3006420947,15.4683530391,11.3849996405,8.90415129291,7.10736411996,5.68621895828,4.24071542822,3.47479916032,3.23213877919,2.66065925139,1.98032133008,1.57994562084,1.62873005313,1.45823589477,1.09361022697,0.850326165856,0.886805636485,0.801884216793,0.656226701638,0.583092312831,0.522478790314,0.485965271644,0.40098336661,0.437500490366,0.401063479645,0.364280100216,0.206587804614,0.231053925033,0.194201408055,0.25493562086,0.194474313109,0.145863165316,0.17011574444,0.109417382218,0.0850590939437,0.0729557767947,0.0607792363314,0.0607253202586,0.0973393407312,0.0607860859959,0.036494547903,0.0365101338941,0.0850468767058,0.0242720465914,0.0243175307672,0.0364687875565,0.0486463975289,0.07280111858,0.0121172648285,0.0242888503005,0.0485847906047,0.0486219229966,0.0121753067226,0.0243191330279,0.0,0.0364477138226,0.0121306396997,0.0,0.0121714653025,0.0121450360122,0.0121287810773,0.0363677009286,0.0,0.0,0.0,0.0,0.0,0.024249126252,0.0,0.0,0.0,0.0121584629569,0.01218745987,0.0364352482343,0.0,0.0121584629569,0.0,0.012094172246,0.0,0.0121506879868,0.0,0.0121753067226,0.0121684891033,0.012167491696,0.0,0.012124563126,0.0,0.0,0.0121541809151,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.012167491696,0.0,0.0,0.0,0.012124563126,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0121541809151,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0
,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y12_PT_3
y12_PT_3_weights = numpy.array([0.0,0.0,4074.80334303,2064.22420557,1182.80381597,738.939171336,474.634373093,320.749244309,220.983962635,154.568620462,112.066189798,79.9689809186,57.1288468082,43.7346247181,32.3700725091,24.2758922421,19.4170767139,15.2713635368,11.6875300221,9.84904029636,7.7911811871,6.19447752068,5.38182446807,4.6978286726,4.02581309249,3.3133454328,3.00224618523,2.86137979405,2.23915650628,1.90744259434,1.68673812736,1.31503534493,1.09424740941,1.26516867468,0.833383829166,0.813415426189,0.752992464357,0.803402506563,0.652521956125,0.612514077945,0.58246416238,0.411698057694,0.361464084531,0.311272341493,0.43193112207,0.301276611428,0.341421716854,0.281218293821,0.270995215276,0.140474130342,0.210750347219,0.241052817113,0.170775939099,0.100479888112,0.190887065022,0.120530478681,0.110453470088,0.120482257002,0.150562171658,0.0502574848468,0.11053607089,0.0703428677491,0.0702658452904,0.0501267450081,0.0803260775322,0.10043579854,0.070252168019,0.0601854896917,0.0502219487339,0.0602500745108,0.0602102410075,0.0502469892972,0.0401523779322,0.0201268651159,0.0402116034097,0.0100357329844,0.0200688214208,0.0300944596699,0.0903276751876,0.0603195765363,0.0200669950299,0.0301302313129,0.0,0.0200497269583,0.0100273282805,0.0301355410693,0.0,0.0100273282805,0.0100368693136,0.0201067087021,0.0200627389606,0.020081635082,0.0100586496451,0.0200554540574,0.0100311422145,0.0100698393886,0.0200699908069,0.0100570257274,0.0200529004158,0.0,0.0100459020976,0.0100231217964,0.0100368693136,0.0100548233148,0.0,0.010046112835,0.0100408981171,0.0100604595076,0.0100570257274,0.0100459020976,0.0201156753725,0.0,0.0,0.0100185847439,0.0,0.0100534183987,0.0100326132442,0.0100273282805,0.0,0.0100698393886,0.0,0.0,0.0,0.0,0.0200557267764,0.0,0.0,0.0100704592045,0.0,0.0100326132442,0.0,0.0,0.0,0.0100548233148,0.0,0.0,0.0,0.0100611537014,0.0,0.0,0.0,0.0,0.0100704592045,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0100273282805,0.0,0.0,0
.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0100570257274,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0100273282805,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y12_PT_4
y12_PT_4_weights = numpy.array([0.0,0.0,1938.73029834,1052.63777561,648.229756161,431.046257267,305.160575448,219.410515701,163.893129287,124.695553442,97.327800852,76.7667538156,60.6641279642,47.8873614649,38.9112990868,32.3581057057,27.0180916948,22.2162294012,18.5868883286,14.7794322247,12.650514171,10.9077225948,8.59704945164,7.71646414184,6.17676018327,5.59392837753,4.91682881667,3.88830276979,3.44296115928,3.09099800844,2.59618085797,2.36487913366,2.07931616799,1.95824685995,1.59513684363,1.39737890489,1.11097783209,1.13845336453,0.896822975306,0.902332625775,0.742337039968,0.671002057253,0.659993318983,0.561056458065,0.473067396945,0.467531339804,0.566527107912,0.412492523531,0.395877567624,0.297023136147,0.286118399538,0.313660314284,0.225524228941,0.319074738233,0.236504448005,0.19797825163,0.18695858506,0.220130727201,0.159452258382,0.121128093358,0.11003343184,0.148599075721,0.0990282686273,0.120936705927,0.104568225831,0.126449931453,0.0824951327773,0.076980729107,0.0880288336307,0.0715763396932,0.132042458246,0.0605638638633,0.0549945749417,0.0770299267675,0.0440152902668,0.0494887432833,0.060548751122,0.0275271676354,0.0604921189678,0.0440268685767,0.0495332689942,0.0604741624311,0.0385244169722,0.049495365264,0.027488650458,0.0605242538559,0.0220096276651,0.0165009603363,0.0439680832631,0.0110103673587,0.03300942423,0.0110048057074,0.0110142146076,0.0109786346645,0.0110059310379,0.00547919342063,0.0165138833552,0.0164990468683,0.022009058906,0.00549235613079,0.0109789718574,0.0109771640161,0.00549900248691,0.0109840094379,0.00551420054206,0.01098288817,0.00549596368839,0.0,0.0109880395022,0.0109917364362,0.0,0.0,0.0,0.0,0.0110134020946,0.0,0.0,0.0,0.00550412944377,0.011008254825,0.0,0.0164920105059,0.00549390803057,0.00550884608157,0.0109840094379,0.0,0.0,0.0109770827648,0.0,0.0,0.0,0.0110243060187,0.0,0.0,0.00549596368839,0.00548804574947,0.0,0.0,0.0054997784368,0.0,0.0,0.0,0.00550229722701,0.0,0.00549390803057,0.0,0.0,0.00549441178861,0.0,0.0,0.0,0.
0,0.0,0.00550802138091,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00548472663397,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00548804574947,0.00549596368839,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00550876076771,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00551420054206,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00550041625948,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y12_PT_5
y12_PT_5_weights = numpy.array([0.0,0.0,309.645748222,177.360671578,113.779170994,78.6782875429,57.4906134709,43.4943516423,33.4174800329,26.3687721005,21.5688014447,17.461998081,14.3230724179,11.9998925688,10.2001435712,8.57561840825,7.31547384493,6.18456562788,5.51073849367,4.85421929766,4.12008694574,3.71238599577,3.16576138479,2.8283712281,2.52738425896,2.26079551427,1.91239329214,1.73480655115,1.5484247481,1.44865157916,1.22664781068,1.15167573872,1.03812115406,0.913805424965,0.801371968212,0.706589161256,0.670090575617,0.590138964145,0.525005721524,0.496377878691,0.437121238243,0.369102604398,0.360206171851,0.335551418738,0.319730608419,0.270357409943,0.261496491137,0.226948635994,0.225036385398,0.189484004458,0.17170721274,0.148012452957,0.137161802674,0.107575409507,0.121406688766,0.0947403588168,0.095725003332,0.0947394769858,0.0739996929122,0.0730469145639,0.0720275980558,0.0592192828788,0.0621795495746,0.0453916511938,0.0503169981809,0.0424210831082,0.0532948614141,0.0414506681392,0.0414434932413,0.0315795637848,0.029597520271,0.0374959645955,0.0276289046389,0.0236781893764,0.0256516310293,0.0276286801728,0.0266480319555,0.0246680046281,0.0177613877335,0.0157807791992,0.0187628872464,0.0167751078225,0.0197395311491,0.0187527782561,0.00987100815464,0.0118473237751,0.0147930202172,0.0128361970709,0.00592129898119,0.00691303421957,0.0108601419915,0.0108535282587,0.0118415958817,0.00395055011959,0.00789832808308,0.0098671120648,0.00888479237687,0.0128280200922,0.00789449211808,0.0069116032483,0.00394406425234,0.00493348388259,0.00690785546641,0.00394197391196,0.0039488357599,0.00591939903615,0.00395118303377,0.00691432089121,0.000986802596242,0.00395266290658,0.00788806276819,0.00395020660632,0.00493785295453,0.00493705529828,0.0029586793999,0.00197260992591,0.00197241271643,0.000983917004602,0.00296245884754,0.0,0.000986374908191,0.00197382484858,0.0,0.0,0.00197281996203,0.002957876132,0.0,0.000986802596242,0.000988581089103,0.00295842807806,0.00197584824997
,0.0,0.0,0.000985643790098,0.00197439282793,0.00394815354334,0.00197196739176,0.0,0.00197060255782,0.00197636091446,0.00197427658657,0.00197435715386,0.0,0.000984227649625,0.00295979371366,0.000985643790098,0.000987870413457,0.0,0.0,0.0,0.000988446810287,0.000984227649625,0.0,0.0,0.00197555524156,0.000986210166121,0.0,0.0,0.0,0.0,0.0,0.0,0.000986146032955,0.000986374908191,0.00197464455061,0.000988581089103,0.0,0.000983917004602,0.0,0.00196935396524,0.000987952984908,0.0,0.0,0.00098818105848,0.0,0.000987952984908,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019724087081,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000985776064753,0.00098818105848,0.0,0.0,0.0,0.0,0.0,0.000985436960637,0.0,0.0,0.0,0.000987667592319,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000988902957431,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000988180657648,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000988902957431,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000986802596242,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000987952984908,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000986298750056,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y12_PT_6
y12_PT_6_weights = numpy.array([0.0,0.0,74.4845889609,43.4913206131,28.3892160537,19.9470152862,14.7254113581,11.230326605,8.85557904545,7.12394260434,5.68363024256,4.75390821094,3.9414503928,3.3317711461,2.89309941003,2.46328506546,2.12100597884,1.87846156855,1.6238737785,1.41136159878,1.25531538986,1.14926034975,0.994678903239,0.926885329768,0.788979209065,0.735056267463,0.639521430254,0.599434712007,0.516476986624,0.508941122102,0.462812108872,0.400565507831,0.376833796136,0.31584818709,0.309298606594,0.286624796645,0.256620172908,0.247274700653,0.212247954343,0.200659647166,0.180231112743,0.163342454155,0.152009610185,0.139902294595,0.131072629962,0.122769815681,0.115454645763,0.108623555617,0.0940248636114,0.0877238646196,0.0821751717632,0.0824279943165,0.0753903730698,0.0650412124827,0.062769370384,0.0579778648708,0.0547044549644,0.0542022106992,0.0471398233249,0.0388155917453,0.0380616011931,0.0395726510568,0.0325183896929,0.0350368368066,0.0292351293494,0.0246967344607,0.0229334421873,0.0249613719372,0.020411690256,0.02041405084,0.0206660491894,0.0214293140362,0.0173941916671,0.0136150006205,0.0148756725357,0.0141188332787,0.0136106195365,0.0138617256652,0.0133599415138,0.0110925685209,0.0083177499759,0.00832209905195,0.00655595807373,0.00957955417122,0.00907669775459,0.00705748216071,0.00731092886844,0.00630332356695,0.00453458594627,0.00554548406247,0.0078128170455,0.00554704444854,0.00529034893699,0.00529350571804,0.00504179143895,0.0047897410767,0.00353119728807,0.00453442190569,0.0020166685637,0.00352894273025,0.00252048041672,0.00579962294145,0.00403328591473,0.00151223895784,0.00252197718705,0.00252155028142,0.00100818344453,0.00277326055953,0.00201670057162,0.00100801300236,0.000756454361774,0.00201595918819,0.00176524115558,0.00176359274774,0.00201803690225,0.000253122627546,0.000504481618733,0.00201665816113,0.000755843810712,0.0,0.00176276374262,0.00125959964987,0.00176441935225,0.00100870357322,0.00100778254534,0.000756892070072,0.0,0.00100786256
514,0.0012612584603,0.000503818654703,0.00050314848889,0.000252212122269,0.00100758409624,0.000755203252225,0.00125954043522,0.000504225155279,0.00126093598051,0.000252074008097,0.000755841010019,0.000503848662127,0.000504239158744,0.000503530183329,0.000755901424967,0.000252297303344,0.000252297303344,0.000504428005468,0.000504047511326,0.0,0.000251762611051,0.0,0.0,0.000757423401534,0.000504696471892,0.00101002389989,0.000503684221441,0.0,0.0,0.0,0.000251606332384,0.0,0.000252178233884,0.000252178233884,0.0,0.000252009512139,0.000252178233884,0.0,0.0,0.000251636379819,0.0,0.000504187946073,0.000252009512139,0.000251949817369,0.0,0.000251636379819,0.000252139424282,0.000252297303344,0.0,0.0,0.0,0.0,0.000251949817369,0.000503714228866,0.0,0.000251866476749,0.0,0.000252077849047,0.0,0.0,0.000251949817369,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000253122627546,0.0,0.0,0.000252316268036,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y12_PT_7
y12_PT_7_weights = numpy.array([0.0,0.0,32.586575387,19.1789643881,12.6993486604,9.0422418237,6.75571907957,5.16828075364,4.03793404798,3.29925291669,2.72793862449,2.29074532722,1.91020202072,1.62555353615,1.40120181866,1.18156052057,1.06848046523,0.925348194086,0.789618339021,0.694870732126,0.633375967048,0.587187665798,0.513600358443,0.442892085011,0.41249168604,0.372818998439,0.323747464674,0.30544160731,0.261986388959,0.249361625271,0.225540118715,0.218185386637,0.183806722596,0.1660689746,0.173488984772,0.146023101108,0.139698223457,0.12711744501,0.117942123841,0.10276381701,0.110506318968,0.0944975807927,0.0904593158558,0.0790140363852,0.0772999815462,0.0664116451901,0.0581364819688,0.0546824709821,0.0612656919444,0.0629880639925,0.0523947686338,0.0483947308695,0.0500766964762,0.040649029984,0.0429373921109,0.0340654591391,0.0332041781469,0.038356439276,0.0335069365545,0.0306250735803,0.032067609529,0.0226063343144,0.0260528078303,0.0263268758652,0.0188914008817,0.0206228698773,0.0183211522319,0.0211961275174,0.0177458152895,0.0180416160319,0.0165978804856,0.0100178684466,0.0151802161911,0.015458062958,0.0123114088359,0.0125854568775,0.0103088608024,0.0105870674486,0.00744396013632,0.00830652569748,0.00801214347913,0.00916870238913,0.00801970194284,0.00916661508954,0.00687450721953,0.00515180628174,0.00715776517069,0.00600791900819,0.00600735119872,0.00687532194614,0.00400455424877,0.00372659551942,0.00687373048017,0.0040124336048,0.00543815819471,0.00342898738354,0.00343452052686,0.00286128188039,0.00400461922696,0.00344128825591,0.00228862403903,0.00229076032219,0.00172308580864,0.00258192661966,0.00114456993297,0.00286304328934,0.00171851534228,0.00200448738272,0.00171556033385,0.00114608242544,0.00114436300241,0.00171876825742,0.00143437668665,0.000571063776262,0.000860915714776,0.000859621049208,0.00200748137807,0.000569384139874,0.00114225870851,0.00200180128405,0.00114652527684,0.000573111889006,0.000857504059576,0.00143107879327,0.000287239315194,0.001
43039302339,0.000862114312579,0.000573236147311,0.000287115456756,0.00200569997583,0.00142667627056,0.0,0.00229228680996,0.000287115456756,0.00114593747408,0.00114392015101,0.000573417486461,0.00143203947091,0.000858821817395,0.0,0.0,0.000574061470369,0.000859836077054,0.000286437084387,0.0,0.000286426188044,0.000287263107211,0.0,0.0,0.000286302029706,0.000573417486461,0.0,0.0,0.000861311981806,0.000286426188044,0.000287759440663,0.0,0.0,0.00114735099977,0.0,0.000573561438157,0.0,0.0,0.000286302029706,0.000573689295254,0.000284622293351,0.0,0.0,0.0,0.000287759440663,0.0,0.000287759440663,0.0,0.0,0.0,0.0,0.00028675147889,0.000572159508583,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000287759440663,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000286170573816,0.0,0.0,0.0,0.0,0.0,0.0,0.000287263107211,0.000286302029706,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000286426188044,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00028406048187,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y12_PT_8
y12_PT_8_weights = numpy.array([0.0,0.0,5.61041710734,3.35646649036,2.24967417104,1.61341202271,1.21904525318,0.946024522905,0.748336966791,0.608979479402,0.507336355931,0.415656840186,0.357231846431,0.301648484782,0.261927787111,0.226236017833,0.197871455024,0.17453808643,0.158604427749,0.137317416485,0.119793543531,0.111225564245,0.0991102806019,0.0896316077633,0.0800021026148,0.0731360683435,0.0664883825953,0.0602060220059,0.053832005328,0.0500490438222,0.0450843599923,0.0424041213928,0.0377450123398,0.0363190665639,0.0324921880873,0.029628649884,0.0284407500989,0.025632767136,0.0239876472197,0.0225051571391,0.0208782008024,0.0211577246307,0.01815363871,0.016604416124,0.0157454053735,0.0158284951589,0.0136476041254,0.0132554904109,0.0129345096999,0.0115956644492,0.0110132941529,0.00992535692009,0.00952200307622,0.00893865628852,0.009088005002,0.00833188208729,0.00790152175814,0.00740547247021,0.00675571588023,0.00636897077336,0.00572212270306,0.00632577464613,0.00543831572337,0.00615423065859,0.00473018338331,0.00423175782642,0.00393049302316,0.00388697586912,0.00388897159977,0.0038197647571,0.0039741111791,0.00349753681924,0.00349679585918,0.00403817865912,0.00243637976036,0.0029576795253,0.00239426699385,0.00278620971758,0.00222292333374,0.00241772290583,0.00254880452319,0.00211622760029,0.00185785340397,0.00222286633681,0.00185730774221,0.0020730121947,0.00187880312753,0.00142516332541,0.00144734015929,0.00159853917743,0.00159806979097,0.00138285609879,0.00142557655313,0.00125280168341,0.00125300955456,0.00131740728148,0.000928772475003,0.00118769778133,0.00105690408227,0.00101491327242,0.000993803035453,0.000906969893004,0.00101558969184,0.000777546215682,0.000777404561553,0.000863772508492,0.0006695827197,0.000475160333152,0.000604805292663,0.000863346288822,0.000712881944309,0.000734728950384,0.000647956576718,0.000669509378065,0.0005400978545,0.000432070236975,0.000259250943181,0.000583107484427,0.000453403851609,0.000367385136578,0.000237442871036,0.00023
7431681198,0.000345900983001,0.000475464177067,0.000237714737998,0.000432045091272,0.000237553637859,0.000323722514648,0.000367172948752,0.000172703204464,0.000302352438744,0.000172780108406,0.000259082047874,0.00028059599911,0.000302219878978,0.000194478210009,0.000194309608068,0.000194397031297,0.000345495969541,0.000172745658793,8.64212977394e-05,0.000172741845028,8.65270354216e-05,0.000172736019607,0.000129704554827,4.31157447947e-05,0.000129580376963,0.000129631129374,2.15983444067e-05,0.000127889160781,8.6386848126e-05,4.31854822117e-05,8.63685336721e-05,4.32019526473e-05,8.64040729327e-05,8.62513966045e-05,4.32525374204e-05,8.64123291053e-05,8.6422010201e-05,0.0,4.31688022286e-05,0.000129607492413,2.16179748191e-05,6.47478808357e-05,4.31601269609e-05,0.000107872594265,8.64430906822e-05,6.47487190258e-05,2.15882986983e-05,0.0,0.000107883616464,6.47717273442e-05,0.0,6.48102840892e-05,4.31773098582e-05,2.16553078065e-05,0.0,2.15594356219e-05,6.48562588166e-05,0.0,4.31961272261e-05,4.31086620883e-05,2.16257448414e-05,0.0,4.32232007666e-05,0.0,2.15789612605e-05,0.0,0.0,0.0,0.0,2.16751645301e-05,4.32695107701e-05,2.16179748191e-05,0.0,0.0,2.15556386207e-05,0.0,4.32092029918e-05,4.3149020942e-05,0.0,0.0,0.0,4.32031261135e-05,0.0,6.48560073596e-05,0.0,0.0,0.0,4.31086620883e-05,0.0,0.0,0.0,8.63423402312e-05,0.0,0.0,0.0,0.0,0.0,2.16751645301e-05,0.0,4.32116337431e-05,0.0,0.0,0.0,0.0,0.0,0.0,2.15983444067e-05,2.16553078065e-05,0.0,0.0,0.0,2.15983444067e-05,0.0,0.0,0.0,2.16291814208e-05,0.0,0.0,0.0,2.16553078065e-05,0.0,0.0,0.0,0.0,0.0,2.16553078065e-05,0.0,0.0,0.0,0.0,2.16751645301e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.15789612605e-05,0.0,0.0,0.0,0.0,0.0,0.0,2.16553078065e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.15563217457e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0
.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.16403083945e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y12_PT_9
y12_PT_9_weights = numpy.array([0.0,0.0,2.02346183182,1.23317806486,0.83466865034,0.599557886149,0.455462899413,0.350813651529,0.27994811664,0.230626019169,0.192356224684,0.158706752592,0.137961870348,0.117361549518,0.0998009684004,0.0901740988752,0.0762087683031,0.0666001193075,0.0579900652209,0.052058428984,0.0473136070633,0.0437934363857,0.038268101624,0.0347893466408,0.0320383436658,0.0291744387459,0.0252327605439,0.0220501225783,0.0211326958964,0.0199279728935,0.0173522998231,0.0174529508007,0.0152488310073,0.0138637084268,0.0134926481585,0.0120775010557,0.0117073347368,0.0108017877808,0.00985910331677,0.00904315622346,0.00855661907954,0.00787920740974,0.00810906080124,0.00734620865784,0.00688500799932,0.00616415514186,0.00584186263325,0.00589924170932,0.00451234310981,0.00456207758227,0.00527124969924,0.00428686591234,0.00353986429323,0.00371826659187,0.00312026346485,0.0035724785957,0.00286490133193,0.00345558951773,0.00308730168039,0.00297648165739,0.00255130329771,0.00274919577165,0.00238042829038,0.00238439611883,0.00178806060833,0.00189527810445,0.00192901652599,0.0017609525588,0.00180946638942,0.00167590708609,0.00152515633443,0.00133343537809,0.00124379571838,0.00107578417048,0.0013846926829,0.000963079874454,0.00107717276194,0.000993489745294,0.00085197948992,0.00098746568479,0.000906382397658,0.000624193616219,0.000565890594469,0.000680475005615,0.00076474532621,0.000707492620726,0.000510840390165,0.000822013178345,0.000709027481311,0.000479939441156,0.000591769092363,0.000623995521789,0.000481421585458,0.000709874208774,0.000482223021466,0.000511012349198,0.000369422796653,0.000283677459591,0.000423398034259,0.000395132008453,0.000366979731023,0.000255705605497,0.00056751188006,0.000227031956596,0.000255314168529,0.000339088659065,0.00034086452959,0.000283957821122,0.000255582353341,0.000226843811438,0.000227238366834,0.000228828765138,0.00025207664623,0.000283714138245,0.000253972204993,0.000340879973233,0.000170418677358,0.000283833826483,0.0001693
34058384,0.000450532961664,0.000199005010274,0.000340871211935,0.000196411072125,0.000141709255633,0.000228826389193,0.000251618385803,0.000198760584914,0.000113625286626,0.000141657504577,0.000198694355442,8.51516626099e-05,8.52213966009e-05,0.000113544534189,0.000113605328687,0.000170109507492,0.000111873502246,0.000141832968128,8.51223493863e-05,5.67835305598e-05,8.52732367548e-05,0.000142003902534,8.66665058573e-05,0.000113535193755,2.84102902274e-05,5.66115715274e-05,0.000113358215538,5.63745115972e-05,0.000113097797097,8.5254288592e-05,0.0,2.83706861912e-05,8.51144196693e-05,0.0,5.68825777744e-05,0.000113602373605,2.84511119355e-05,0.0,0.0,2.83706861912e-05,2.84511119355e-05,5.67623549484e-05,8.51924694684e-05,5.4502712286e-05,5.63852033505e-05,5.67041145922e-05,0.0,2.70033742898e-05,2.83520647209e-05,0.0,5.55421734521e-05,2.83520647209e-05,5.65167416155e-05,0.0,0.0,2.84314658389e-05,0.0,2.83995984741e-05,0.0,0.0,2.83995984741e-05,8.51223493863e-05,0.0,5.6574967122e-05,0.0,0.0,0.0,0.0,8.52320883542e-05,0.0,0.0,0.0,0.0,2.83706861912e-05,0.0,0.0,0.0,0.0,0.0,0.0,2.83706861912e-05,0.0,2.84511119355e-05,2.84511119355e-05,2.83706861912e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.83706861912e-05,0.0,0.0,0.0,2.83706861912e-05,2.84511119355e-05,5.67623549484e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.84511119355e-05,0.0,0.0,2.84102902274e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.84102902274e-05,0.0,0.0,0.0,0.0,0.0,0.0,2.84314658389e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.83995984741e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,
0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y12_PT_10
y12_PT_10_weights = numpy.array([0.0,0.0,1666459.58683,543469.332579,227725.633965,111234.592807,59143.801425,34344.9323873,20641.1487835,13225.788682,9135.69423681,6271.46994619,4365.95406667,3237.59204605,2338.13071887,1808.94213298,1282.74858562,1110.42178212,909.667298761,703.850204373,508.176404551,445.686265688,398.739642357,336.305682096,258.006900075,224.140585535,153.797631757,153.807206344,114.659334194,101.689690713,88.6209175698,104.285518946,70.3510287303,80.7870980216,28.6642721471,36.4967689411,31.2601849747,39.0985380322,36.488159503,36.4933736156,7.81762734474,15.6488859792,7.82191860549,10.4167470823,15.6538886049,15.6552498113,18.2536620291,2.60447573853,5.20247305761,10.4294555203,5.20877536619,7.83195846375,13.0395802652,5.21460086801,2.60481450204,5.22502909306,5.22073783231,5.21668882015,2.60827250454,2.61035699599,2.59804346167,0.0,0.0,0.0,2.61074267113,0.0,0.0,5.21550449371,0.0,0.0,0.0,0.0,0.0,0.0,0.0,5.21991111093,0.0,0.0,2.6103446913,0.0,0.0,0.0,0.0,2.60657445728,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.60341907325,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0
.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y12_PT_11
y12_PT_11_weights = numpy.array([0.0,0.0,547143.001659,232639.949704,118043.703432,66849.6956534,40448.4838867,25547.4043688,17131.7112714,11895.4009112,8529.22063196,6220.34006566,4440.32866951,3406.0576626,2514.05582526,1901.19874958,1497.6180987,1196.42500166,949.967982246,724.634333156,645.642931938,513.989698798,447.620761819,379.205320388,292.807965497,251.70927426,207.49402019,165.386409069,154.84819432,149.550576167,124.256936965,104.249411566,81.0993519772,92.6977752311,66.3720150659,46.3473293341,43.1799011164,41.0771061532,41.0662174213,48.443583363,31.5888306421,30.5407998179,26.3216893955,28.4418755488,27.3725982308,15.8027857932,14.7487719379,25.2779717404,13.6937923328,15.8002155908,16.8502741046,12.6423102039,7.37587691722,6.31834250017,6.31965068704,9.4809073409,11.5841409314,2.10970603745,7.37884727093,9.48192310952,2.10712429219,5.26724513204,8.43161834873,3.15998117166,1.05314698915,6.31823861474,6.32310968702,0.0,3.16089613295,3.15698119136,2.10773221432,1.05259139449,2.10732552211,4.21376994189,0.0,1.05386610716,1.05386610716,1.0519496134,1.0539826897,1.05386610716,0.0,0.0,1.05314698915,0.0,0.0,1.05458560994,0.0,0.0,1.05085035167,0.0,1.05285687947,1.05396114309,0.0,0.0,0.0,0.0,1.05572334775,1.05537321539,0.0,0.0,1.05572334775,0.0,1.0519496134,1.05235745989,0.0,0.0,1.05572334775,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0532270194,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.
0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y12_PT_12
y12_PT_12_weights = numpy.array([0.0,0.0,100743.565892,49006.4580244,27455.9679697,16988.1707996,11156.5957932,7794.18635539,5579.39679605,4088.68975343,3083.96445698,2393.20049447,1894.95910338,1506.85433249,1230.70551356,1022.43486784,813.569817419,698.61225621,595.875532638,482.772268179,412.99023248,342.977197359,284.45880409,248.767587647,219.291108623,177.344195781,161.688797889,128.531197851,117.24807493,102.962574303,94.2079575204,78.0934425154,66.5684848709,68.4176713544,54.136281995,43.9887141122,40.0765936767,33.1683700212,26.7189061442,29.9484025418,26.9570829924,26.2598466659,18.8862852044,18.4245322698,18.1974596852,14.9758592258,12.210040758,14.2848282231,12.2135679947,10.135606785,8.29196090633,5.75743387482,7.603888479,5.53044197861,5.76091500382,5.52464778155,4.14660867016,4.60766998975,4.37428065448,3.45796489196,3.45587851994,2.76489516178,0.921510559885,2.5326996309,2.53308194032,2.76562327872,4.14674315086,2.53214364926,2.99209031596,2.53285639698,2.76389078306,1.15199126971,1.38221838735,1.1525303452,0.922527618215,0.460474599508,0.460637129041,0.690959535867,0.0,1.38238783303,2.76611240424,0.459363788919,0.460782752429,0.690632555649,0.691007180458,0.230341157278,0.460051561646,0.460987163094,0.230621607174,1.15255378327,0.691094016568,0.921258888859,0.230728615389,0.461219622591,0.0,0.0,0.0,0.230104970744,0.0,0.230527662799,0.230336584934,0.23056412628,0.691513596355,0.461195031835,0.230498922352,0.0,0.0,0.0,0.230404670592,0.0,0.230404670592,0.0,0.230596055841,0.0,0.0,0.460986010402,0.0,0.46119195799,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.230441710419,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.229928916295,0.230545491098,0.0,0.23056412628,0.0,0.0,0.230090562098,0.0,0.0,0.0,0.0,0.0,0.230404670592,0.0,0.0,0.230728615389,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.230574654198,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.230621607174,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0
,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y12_PT_13
y12_PT_13_weights = numpy.array([0.0,0.0,10612.7656962,5599.34317217,3360.91778079,2178.97502784,1514.09012707,1090.75226384,811.582426665,617.732330675,484.561377318,385.404214023,315.382349291,254.841203859,204.471186915,172.542474153,146.928123958,122.088935624,104.785040763,90.2445908191,74.0751429755,65.9888994302,59.8117646337,53.1433490311,45.581779537,38.3504453844,36.4117713029,33.7534727898,28.8813444723,26.4994404637,24.6745996459,21.1561480773,19.5790328167,17.2521468724,15.7573528858,14.2882463068,13.6777500769,11.9074156526,11.4642241971,11.0485513916,9.63580168438,8.39043369452,6.81232971688,7.78128011929,6.1191775111,5.45579068979,5.67682858106,4.264967565,4.76243073802,3.8768018681,3.82197115994,3.40616408901,2.71403368575,2.35400718385,2.04903914127,2.54783881353,1.93820511422,1.68928040087,1.96594420581,1.43981824095,1.77196561507,1.63363140432,0.830607727921,1.07965516529,0.997186160808,0.969077743003,0.969282795994,0.692315451079,0.775843190701,0.85823294393,0.747855188631,0.636933061882,0.553533047575,0.4985903874,0.304469798431,0.553733868703,0.415108428839,0.332436064115,0.442980631754,0.110849377179,0.332424214899,0.276975193098,0.276707277707,0.221497433101,0.277031284516,0.304612566095,0.193916189994,0.166090075913,0.221642624468,0.33229952883,0.138541302736,0.249283036712,0.138517411946,0.166071763488,0.138451587243,0.0831439096615,0.0830683516737,0.110854647772,0.027692510323,0.0831602985122,0.0552877493547,0.0829777128657,0.1385785816,0.0831024374055,0.166163864213,0.110792208559,0.082990793169,0.0553269133218,0.166358568376,0.0277086914277,0.0,0.05547368202,0.0554217455213,0.0554250155971,0.0276936490788,0.027723379839,0.0831042840365,0.0276409739277,0.0276409739277,0.0553332226446,0.0830456535002,0.0277207445425,0.0276899019566,0.0,0.0552912887309,0.0,0.0,0.0829227755915,0.027661510004,0.0276409739277,0.0,0.0,0.0,0.0276650378388,0.0276957688574,0.0,0.0276433360766,0.0554014325796,0.0,0.055449983588,0.0276605251341,0.0,0.0,0.0553552668
03,0.0,0.0276859432413,0.0276409739277,0.0,0.0,0.0276899019566,0.0,0.0,0.0553598064377,0.0,0.0,0.0,0.0,0.0,0.0,0.027661510004,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0276433360766,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0277272962357,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.027698200255,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y12_PT_14
y12_PT_14_weights = numpy.array([0.0,0.0,2281.34810305,1248.1459041,782.38994086,519.614439006,370.046978915,271.586805943,206.447285556,158.375426611,126.380881301,99.8012689648,83.9930847761,69.47411255,56.8422505036,46.5590601859,40.1961604614,33.9367860656,29.9124482693,25.1757397,22.2903200904,19.4893773917,17.481556436,15.2527095103,13.2683000856,11.6947636359,10.9799776765,9.85038647869,8.34735769573,7.48132523623,6.56324490192,6.0188260733,5.6962930938,5.59590143919,4.21472002624,4.36566068367,3.96204783186,3.79112180997,3.37723288597,2.95445631857,2.65115016506,2.75223299992,2.44015571453,1.83485416198,1.91571193424,2.1070420338,1.6228372016,1.62344585286,1.6330010101,1.32092129739,1.36132287613,1.27014473303,1.37134954088,1.03868430808,1.3107222927,0.927517159553,1.02825956235,0.947651804371,0.806905296979,0.786640790377,0.705715659904,0.705607037197,0.624945271278,0.564802698833,0.544487582545,0.534357331785,0.32248279459,0.514241377354,0.433470806678,0.403337714742,0.352771356568,0.221818612276,0.342740504688,0.262031163236,0.342771271008,0.20155088947,0.201783548386,0.141181240953,0.221833176214,0.171425322236,0.111000755534,0.120959151821,0.12102402203,0.120970863655,0.111003668322,0.161300715002,0.0705779377135,0.131043647541,0.0403564062618,0.1714849737,0.0907426206549,0.0704841216771,0.0605223150014,0.0706008152334,0.0604959299997,0.0907420138242,0.0604203795693,0.0806212296244,0.0605042253763,0.0706232072886,0.0402962025815,0.10082110875,0.0503674572541,0.0100965959254,0.0504199481154,0.0302603200742,0.0,0.0302356220621,0.0100991264097,0.0100724501292,0.0605453381607,0.0100787793741,0.0201573827673,0.0201364774474,0.0201574980652,0.0201510535224,0.0100662301139,0.0201783791118,0.0100796471421,0.0302739737665,0.0100724501292,0.0201921177604,0.0201573827673,0.0100991264097,0.050489939976,0.0100786033932,0.0,0.0100939865531,0.0100786033932,0.020192761001,0.0,0.0201166037398,0.0,0.020196863177,0.0100703201532,0.0201523703452,0.0100420418395,0.0,0.02015
74859286,0.0,0.0,0.0100420418395,0.0,0.0,0.010082050192,0.0,0.0100662301139,0.0100939865531,0.0,0.0100944356079,0.0100965959254,0.0,0.0,0.0100965959254,0.0,0.0,0.0,0.0,0.0,0.0100724501292,0.0,0.0,0.0100843925587,0.0100987744479,0.0,0.0,0.0100698164837,0.0,0.0100912679513,0.0,0.0,0.0,0.0100787793741,0.0,0.02017566051,0.0,0.0,0.0,0.0,0.0100662301139,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0100987744479,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.010082050192,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0101024275691,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y12_PT_15
y12_PT_15_weights = numpy.array([0.0,0.0,953.414824987,534.673336065,340.104097197,232.78176445,166.794942499,126.033504493,95.2442967963,74.9721903183,60.0005135616,48.6386354359,40.5219364233,33.4597292136,27.8494979432,23.5924602325,20.2911011658,17.3879418546,14.9269159308,12.7056185869,11.2860878981,10.0153328992,8.84433367585,7.81114885712,7.10417096488,6.26697731265,5.6128552074,4.93974610924,4.49539661095,4.02866285231,3.70644646463,3.40922019147,3.05546769743,2.77804907081,2.4529017224,2.21803858339,2.19549465532,1.91269811204,1.72566642081,1.67765414453,1.53896560726,1.30716731903,1.25897806167,1.11749168422,1.20520389671,0.899759545158,0.868609719256,0.879863792814,0.769567254391,0.871389476579,0.681809257663,0.744069281094,0.580016660571,0.551724309774,0.50357929768,0.475233852556,0.48655102372,0.4866537497,0.441386604011,0.40167935771,0.328223779902,0.339413717053,0.333818267551,0.260281470813,0.297085957934,0.268825002043,0.297062565651,0.254640160124,0.274436341369,0.246130716991,0.215050067609,0.223463864007,0.198071040561,0.183878657709,0.215014709866,0.172546597049,0.178272127651,0.155613969825,0.169739022916,0.135756768579,0.13578700926,0.115992059188,0.121604244945,0.104685122148,0.0877071339021,0.118803557685,0.0650771006795,0.132978742592,0.101783440245,0.0877003624517,0.0481081153843,0.0707222203089,0.0848812075993,0.0679419628219,0.0819882593275,0.0678864830979,0.0480792212978,0.0395910158947,0.0594020481615,0.0339322225524,0.0339385477027,0.0424307544173,0.0311209048843,0.0367986852195,0.0735503781124,0.033962359354,0.033943918694,0.0368015169169,0.033953217896,0.014146106145,0.0141413545875,0.0339482893574,0.0254764662473,0.0311218782803,0.00848535831597,0.0339339846685,0.0197941036409,0.0169811700584,0.00566110179125,0.00283299708207,0.0141301162884,0.0113145857008,0.0056570812426,0.0169757644404,0.00849395728845,0.00847939482275,0.00849543084839,0.016959424469,0.00282583819702,0.00565720820729,0.011303897582,0.00564885931679,0.00848163786
569,0.00849826254582,0.0141504614188,0.00565606937246,0.00849118714967,0.0141514309674,0.00282511372878,0.00283620582616,0.00282355398674,0.00849056386844,0.0,0.00283009920911,0.00565997449866,0.00565541531191,0.00848804381163,0.0,0.0,0.00283620582616,0.00566204440792,0.00566691523529,0.0113209916468,0.00566444134746,0.0141406428158,0.00850469157625,0.0,0.0,0.0,0.0,0.0,0.0,0.00282188343916,0.0,0.00282511372878,0.0,0.0,0.0,0.00283299708207,0.00283195866477,0.0,0.00566127877234,0.00282140559022,0.0,0.00565888568023,0.00282745488079,0.00283352841008,0.00283495695526,0.00565491130055,0.00282745488079,0.00282745488079,0.0,0.0,0.0,0.0,0.0,0.00283039161265,0.0,0.00283095602842,0.0,0.0,0.0,0.0,0.00283039161265,0.00283009920911,0.0,0.0,0.00283014691705,0.0,0.0,0.0,0.00282511372878,0.0,0.0,0.0,0.0,0.0,0.00282876800353,0.0,0.0,0.0,0.0,0.00282928740455,0.00283100719904,0.0,0.0,0.0,0.00282188343916,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00283095602842,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00282876800353,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00283248191321,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y12_PT_16
y12_PT_16_weights = numpy.array([0.0,0.0,155.865102679,90.5995524251,58.5534276364,40.9880787903,29.9660430316,22.7952892868,17.8105752338,14.1703701227,11.1015816073,9.08274441088,7.74045932128,6.52868198292,5.40443131053,4.74409961824,4.03169055227,3.56413329314,3.00189451921,2.62606348555,2.29987800411,2.03636805309,1.92203699557,1.60840647182,1.40268595711,1.34941748721,1.19103359415,1.02218459735,0.953623775411,0.890029871985,0.746297762253,0.711113039839,0.647378166394,0.533191032999,0.487354507235,0.498089261929,0.449471857349,0.415813699626,0.349035977094,0.329116310766,0.336601665163,0.265010874052,0.24226708077,0.240668435852,0.207221319582,0.220951775804,0.196379035368,0.184384885483,0.196523432152,0.158552040871,0.156827550965,0.129507726705,0.117225870363,0.127850826783,0.118860768091,0.108178301104,0.100561252585,0.11419122188,0.0807846123349,0.0943764263744,0.0517625839572,0.0929907371714,0.0928804898723,0.0533315696344,0.0700988131137,0.0624711061748,0.0653813512753,0.0639656956038,0.0564216489947,0.0441667459321,0.0410972626105,0.0487391610828,0.0532429345872,0.0411318611198,0.0381100807293,0.0365788958142,0.0320139722806,0.0349517378484,0.025893179308,0.01977799914,0.0289648250363,0.0335237340067,0.0335206026525,0.0198135075144,0.0243448723847,0.0213271095806,0.00916436404311,0.0213299691568,0.0167211830322,0.0198056732208,0.0244139157896,0.0167093902343,0.0228174214617,0.0106620044746,0.019838924657,0.0137273721333,0.00611216815977,0.012194674715,0.0137677488772,0.0106636528667,0.00457991622096,0.0167652701354,0.00760271046127,0.0137001352604,0.0183276149785,0.00914137163213,0.00458124675106,0.0106250911265,0.00760069694147,0.00456050773342,0.00912955874629,0.0136986227572,0.0106301426507,0.00304548060574,0.00764074637026,0.00455358803157,0.00306925644608,0.00610326920576,0.00458307120797,0.00152273262219,0.00912857916418,0.00305877999857,0.00913147182643,0.00154508221934,0.00458420676697,0.0030493693931,0.0015241730451,0.00153301055367,0.00303968
34648,0.0060809385149,0.00456283438864,0.00609078396501,0.00457451138553,0.00152463624919,0.00151695202426,0.00153533130067,0.0,0.004566322599,0.0,0.0,0.0015241730451,0.0,0.00458673311987,0.00304717626355,0.0,0.00304624158388,0.00153301055367,0.00152062693423,0.00151233298147,0.00151695202426,0.00151233298147,0.0030426706585,0.00152062693423,0.00305718359877,0.00303703540267,0.0,0.0,0.00151695202426,0.0,0.00151083583969,0.00153120618469,0.0,0.00304815820895,0.001521306379,0.00152885944154,0.0,0.0,0.0,0.001521306379,0.0,0.00152062693423,0.00152612039289,0.0,0.00153533130067,0.0,0.0,0.0,0.0,0.00152062693423,0.0,0.0,0.0,0.00152160533469,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00152776405841,0.0,0.0,0.0,0.00151083583969,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00152885944154,0.0,0.001521306379,0.00151849643176,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y12_PT_17
y12_PT_17_weights = numpy.array([0.0,0.0,53.615678796,32.1064005093,21.3365064877,15.1067623399,11.1784174291,8.59049144714,6.756794224,5.43296271276,4.39066670123,3.65414839012,3.06056763407,2.57318986965,2.1756891805,1.89626302107,1.63134805982,1.40152817661,1.25237467264,1.06184197427,0.981353908801,0.8467828515,0.751500134418,0.656337191816,0.597275278832,0.540427460659,0.485350841308,0.454476754594,0.406974041753,0.370518310217,0.325902701524,0.289987457144,0.278958884907,0.241768600748,0.227868483492,0.218300081556,0.200975299963,0.179118344475,0.162341983652,0.146965905891,0.142106374092,0.138125393882,0.110142994325,0.109421381968,0.100377445906,0.0966040106067,0.0911803833711,0.0796271912088,0.079267944794,0.0700661837102,0.0657282678467,0.0641028780858,0.0539892134204,0.0557933791094,0.0539905998775,0.0491146616711,0.0411668354092,0.0428006209374,0.035568499186,0.0316003899181,0.0362981683995,0.0285290333378,0.032865300858,0.0254585509957,0.0274453746923,0.0247371350526,0.0265466000883,0.0240170054335,0.0258183712496,0.0198628721484,0.0149927108462,0.0176978232552,0.015346010901,0.0200425454223,0.0146249800848,0.013722535221,0.0153457451634,0.0126385145562,0.0108321998589,0.0111927903667,0.0140848934615,0.0095700310229,0.00957120566009,0.0101105412859,0.00866688522805,0.00812617084782,0.00812264693627,0.00523695620511,0.0074038036412,0.00668260336922,0.00650262584504,0.00704182667638,0.00758253720534,0.00596026312148,0.00631669810955,0.00614050638325,0.00415083274718,0.00451231289824,0.00596037865956,0.00559925282529,0.00505672834842,0.00505166778035,0.00469381937639,0.00433325967873,0.0037949323153,0.0050550029797,0.00324901756592,0.00415300101188,0.0034298797267,0.00307013380227,0.00306983532889,0.00325273596658,0.00234791448439,0.00306929268502,0.00216798510109,0.00180663550806,0.00198712255519,0.00216633560238,0.00252784502309,0.00307030210275,0.00216683164588,0.00180527639507,0.0012630249732,0.00144429228751,0.00126359496108,0.00198604843614,0.0016237
9841631,0.00162461527056,0.00162596090411,0.00144414016237,0.000722141137107,0.00180755981273,0.00108284602763,0.000902456417616,0.000541291685524,0.000721236473912,0.00108263497806,0.000903983060828,0.00144583318042,0.00234780895961,0.00108198603916,0.000722259756206,0.000361421226637,0.000542467863216,0.000542271063347,0.00162578104983,0.000722973396436,0.000722755029458,0.000722284019204,0.000541477316712,0.000180410522403,0.00108446125004,0.000361242566247,0.000361022620249,0.000902385939385,0.000723462892784,0.000541273969685,0.000722252053667,0.000361082892616,0.00126483545497,0.000542031899514,0.000362272819339,0.000541922523461,0.000180570234547,0.000360360933644,0.00036142269012,0.000361302800102,0.00108419281655,0.0,0.000361953934228,0.000542670054862,0.000541128776827,0.0,0.000180752977283,0.000541297847556,0.000360949446129,0.0,0.000180408635281,0.000180070301259,0.000180070301259,0.000180553905165,0.000180619915923,0.000180269758504,0.000361101648298,0.0,0.000360962810034,0.000180833930967,0.00018069305153,0.0,0.000180614023481,0.0,0.0,0.0,0.000180833930967,0.0,0.0,0.0,0.0,0.0,0.000360749950371,0.0,0.0,0.0,0.000361176555489,0.0,0.0,0.000180553905165,0.000361173821088,0.0,0.0,0.000361419493566,0.000360558619305,0.0,0.0,0.000361447915935,0.0,0.0,0.0,0.0,0.000180374705597,0.0,0.0,0.0,0.0,0.00018069305153,0.000180916656235,0.000361312967453,0.000180221309534,0.000180724054249,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000181037277994,0.0,0.0,0.000180833930967,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000180619915923,0.0,0.0,0.0,0.0,0.000180801272202,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,
0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating a new Canvas
fig = plt.figure(figsize=(12,6),dpi=80)
frame = gridspec.GridSpec(1,1,right=0.7)
pad = fig.add_subplot(frame[0])
# Creating a new Stack
pad.hist(x=xData, bins=xBinning, weights=y12_PT_0_weights+y12_PT_1_weights+y12_PT_2_weights+y12_PT_3_weights+y12_PT_4_weights+y12_PT_5_weights+y12_PT_6_weights+y12_PT_7_weights+y12_PT_8_weights+y12_PT_9_weights+y12_PT_10_weights+y12_PT_11_weights+y12_PT_12_weights+y12_PT_13_weights+y12_PT_14_weights+y12_PT_15_weights+y12_PT_16_weights+y12_PT_17_weights,\
label="$bg\_dip\_1600\_inf$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#f2f2f2", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y12_PT_0_weights+y12_PT_1_weights+y12_PT_2_weights+y12_PT_3_weights+y12_PT_4_weights+y12_PT_5_weights+y12_PT_6_weights+y12_PT_7_weights+y12_PT_8_weights+y12_PT_9_weights+y12_PT_10_weights+y12_PT_11_weights+y12_PT_12_weights+y12_PT_13_weights+y12_PT_14_weights+y12_PT_15_weights+y12_PT_16_weights,\
label="$bg\_dip\_1200\_1600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#ccc6aa", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y12_PT_0_weights+y12_PT_1_weights+y12_PT_2_weights+y12_PT_3_weights+y12_PT_4_weights+y12_PT_5_weights+y12_PT_6_weights+y12_PT_7_weights+y12_PT_8_weights+y12_PT_9_weights+y12_PT_10_weights+y12_PT_11_weights+y12_PT_12_weights+y12_PT_13_weights+y12_PT_14_weights+y12_PT_15_weights,\
label="$bg\_dip\_800\_1200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#ccc6aa", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y12_PT_0_weights+y12_PT_1_weights+y12_PT_2_weights+y12_PT_3_weights+y12_PT_4_weights+y12_PT_5_weights+y12_PT_6_weights+y12_PT_7_weights+y12_PT_8_weights+y12_PT_9_weights+y12_PT_10_weights+y12_PT_11_weights+y12_PT_12_weights+y12_PT_13_weights+y12_PT_14_weights,\
label="$bg\_dip\_600\_800$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#c1bfa8", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y12_PT_0_weights+y12_PT_1_weights+y12_PT_2_weights+y12_PT_3_weights+y12_PT_4_weights+y12_PT_5_weights+y12_PT_6_weights+y12_PT_7_weights+y12_PT_8_weights+y12_PT_9_weights+y12_PT_10_weights+y12_PT_11_weights+y12_PT_12_weights+y12_PT_13_weights,\
label="$bg\_dip\_400\_600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#bab5a3", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y12_PT_0_weights+y12_PT_1_weights+y12_PT_2_weights+y12_PT_3_weights+y12_PT_4_weights+y12_PT_5_weights+y12_PT_6_weights+y12_PT_7_weights+y12_PT_8_weights+y12_PT_9_weights+y12_PT_10_weights+y12_PT_11_weights+y12_PT_12_weights,\
label="$bg\_dip\_200\_400$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#b2a596", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y12_PT_0_weights+y12_PT_1_weights+y12_PT_2_weights+y12_PT_3_weights+y12_PT_4_weights+y12_PT_5_weights+y12_PT_6_weights+y12_PT_7_weights+y12_PT_8_weights+y12_PT_9_weights+y12_PT_10_weights+y12_PT_11_weights,\
label="$bg\_dip\_100\_200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#b7a39b", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y12_PT_0_weights+y12_PT_1_weights+y12_PT_2_weights+y12_PT_3_weights+y12_PT_4_weights+y12_PT_5_weights+y12_PT_6_weights+y12_PT_7_weights+y12_PT_8_weights+y12_PT_9_weights+y12_PT_10_weights,\
label="$bg\_dip\_0\_100$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#ad998c", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y12_PT_0_weights+y12_PT_1_weights+y12_PT_2_weights+y12_PT_3_weights+y12_PT_4_weights+y12_PT_5_weights+y12_PT_6_weights+y12_PT_7_weights+y12_PT_8_weights+y12_PT_9_weights,\
label="$bg\_vbf\_1600\_inf$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#9b8e82", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y12_PT_0_weights+y12_PT_1_weights+y12_PT_2_weights+y12_PT_3_weights+y12_PT_4_weights+y12_PT_5_weights+y12_PT_6_weights+y12_PT_7_weights+y12_PT_8_weights,\
label="$bg\_vbf\_1200\_1600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#876656", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y12_PT_0_weights+y12_PT_1_weights+y12_PT_2_weights+y12_PT_3_weights+y12_PT_4_weights+y12_PT_5_weights+y12_PT_6_weights+y12_PT_7_weights,\
label="$bg\_vbf\_800\_1200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#afcec6", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y12_PT_0_weights+y12_PT_1_weights+y12_PT_2_weights+y12_PT_3_weights+y12_PT_4_weights+y12_PT_5_weights+y12_PT_6_weights,\
label="$bg\_vbf\_600\_800$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#84c1a3", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y12_PT_0_weights+y12_PT_1_weights+y12_PT_2_weights+y12_PT_3_weights+y12_PT_4_weights+y12_PT_5_weights,\
label="$bg\_vbf\_400\_600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#89a8a0", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y12_PT_0_weights+y12_PT_1_weights+y12_PT_2_weights+y12_PT_3_weights+y12_PT_4_weights,\
label="$bg\_vbf\_200\_400$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#829e8c", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y12_PT_0_weights+y12_PT_1_weights+y12_PT_2_weights+y12_PT_3_weights,\
label="$bg\_vbf\_100\_200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#adbcc6", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y12_PT_0_weights+y12_PT_1_weights+y12_PT_2_weights,\
label="$bg\_vbf\_0\_100$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#7a8e99", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y12_PT_0_weights+y12_PT_1_weights,\
label="$signal2$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#758991", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y12_PT_0_weights,\
label="$signal1$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#688296", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
# Axis
plt.rc('text',usetex=False)
plt.xlabel(r"p_{T} [ a_{2} ] ( GeV ) ",\
fontsize=16,color="black")
plt.ylabel(r"$\mathrm{Events}$ $(\mathcal{L}_{\mathrm{int}} = 40.0\ \mathrm{fb}^{-1})$ ",\
fontsize=16,color="black")
# Boundary of y-axis
ymax=(y12_PT_0_weights+y12_PT_1_weights+y12_PT_2_weights+y12_PT_3_weights+y12_PT_4_weights+y12_PT_5_weights+y12_PT_6_weights+y12_PT_7_weights+y12_PT_8_weights+y12_PT_9_weights+y12_PT_10_weights+y12_PT_11_weights+y12_PT_12_weights+y12_PT_13_weights+y12_PT_14_weights+y12_PT_15_weights+y12_PT_16_weights+y12_PT_17_weights).max()*1.1
ymin=0 # linear scale
#ymin=min([x for x in (y12_PT_0_weights+y12_PT_1_weights+y12_PT_2_weights+y12_PT_3_weights+y12_PT_4_weights+y12_PT_5_weights+y12_PT_6_weights+y12_PT_7_weights+y12_PT_8_weights+y12_PT_9_weights+y12_PT_10_weights+y12_PT_11_weights+y12_PT_12_weights+y12_PT_13_weights+y12_PT_14_weights+y12_PT_15_weights+y12_PT_16_weights+y12_PT_17_weights) if x])/100. # log scale
plt.gca().set_ylim(ymin,ymax)
# Log/Linear scale for X-axis
plt.gca().set_xscale("linear")
#plt.gca().set_xscale("log",nonposx="clip")
# Log/Linear scale for Y-axis
plt.gca().set_yscale("linear")
#plt.gca().set_yscale("log",nonposy="clip")
# Legend
plt.legend(bbox_to_anchor=(1.05,1), loc=2, borderaxespad=0.)
# Saving the image
plt.savefig('../../HTML/MadAnalysis5job_0/selection_11.png')
plt.savefig('../../PDF/MadAnalysis5job_0/selection_11.png')
plt.savefig('../../DVI/MadAnalysis5job_0/selection_11.eps')
# Running!
# Script entry point: regenerate this selection's histogram when the file
# is executed directly (selection_11 is defined earlier in this file).
if __name__ == '__main__':
    selection_11()
| 391.841584
| 6,295
| 0.7653
| 17,214
| 79,152
| 3.472697
| 0.202626
| 0.270931
| 0.391442
| 0.503454
| 0.289466
| 0.266615
| 0.264558
| 0.251142
| 0.248365
| 0.239164
| 0
| 0.682472
| 0.021882
| 79,152
| 201
| 6,296
| 393.791045
| 0.089667
| 0.017346
| 0
| 0.186441
| 0
| 0.008475
| 0.013879
| 0.002611
| 0
| 0
| 0
| 0
| 0
| 1
| 0.008475
| false
| 0
| 0.033898
| 0
| 0.042373
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6338720171e7d3dce905a5d474b0b25b6422d9be
| 147,059
|
py
|
Python
|
core/domain/user_id_migration_test.py
|
mzaman07/oppia
|
cac5737ba63a0a209d47d20f3b464495da12bd59
|
[
"Apache-2.0"
] | null | null | null |
core/domain/user_id_migration_test.py
|
mzaman07/oppia
|
cac5737ba63a0a209d47d20f3b464495da12bd59
|
[
"Apache-2.0"
] | null | null | null |
core/domain/user_id_migration_test.py
|
mzaman07/oppia
|
cac5737ba63a0a209d47d20f3b464495da12bd59
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
#
# Copyright 2019 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for user-related one-off computations."""
from __future__ import absolute_import # pylint: disable=import-only-modules
from __future__ import unicode_literals # pylint: disable=import-only-modules
import ast
from constants import constants
from core.domain import rights_manager
from core.domain import topic_domain
from core.domain import user_id_migration
from core.platform import models
from core.tests import test_utils
import feconf
from google.appengine.ext import ndb
# Import the storage-model modules used throughout these tests. The
# Registry returns one module per requested name, in the order asked for.
(
    activity_models, base_models,
    collection_models, exp_models,
    feedback_models, question_models, skill_models,
    topic_models, user_models) = models.Registry.import_models(
        [models.NAMES.activity, models.NAMES.base_model,
         models.NAMES.collection, models.NAMES.exploration,
         models.NAMES.feedback, models.NAMES.question, models.NAMES.skill,
         models.NAMES.topic, models.NAMES.user])
# Platform-specific service handles for the task queue and search index.
taskqueue_services = models.Registry.import_taskqueue_services()
search_services = models.Registry.import_search_services()
class HelperFunctionsTests(test_utils.GenericTestBase):
    """Tests for the module-level helper functions in user_id_migration."""

    # New-style user IDs are 'uid_' followed by 32 lowercase characters.
    USER_1_USER_ID = 'uid_aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
    USER_1_GAE_ID = 'gae_id_1'
    USER_2_USER_ID = 'uid_bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb'
    USER_2_GAE_ID = 'gae_id_2'
    USER_3_USER_ID = 'uid_cccccccccccccccccccccccccccccccc'
    USER_3_GAE_ID = 'gae_id_3'

    def setUp(self):
        """Create three UserSettingsModels mapping GAE IDs to user IDs."""
        super(HelperFunctionsTests, self).setUp()
        user_models.UserSettingsModel(
            id=self.USER_1_USER_ID,
            gae_id=self.USER_1_GAE_ID,
            email='some@email.com'
        ).put()
        user_models.UserSettingsModel(
            id=self.USER_2_USER_ID,
            gae_id=self.USER_2_GAE_ID,
            email='some.different@email.com'
        ).put()
        user_models.UserSettingsModel(
            id=self.USER_3_USER_ID,
            gae_id=self.USER_3_GAE_ID,
            email='some.different@email.cz'
        ).put()

    def test_verify_user_id_correct(self):
        """verify_user_id_correct accepts exactly 'uid_' + 32 lowercase
        characters and rejects everything else.
        """
        self.assertTrue(
            user_id_migration.verify_user_id_correct('uid_' + 'a' * 32))
        # Uppercase character in the body is rejected.
        self.assertFalse(
            user_id_migration.verify_user_id_correct('uid_' + 'a' * 31 + 'A'))
        # Body one character too short is rejected.
        self.assertFalse(
            user_id_migration.verify_user_id_correct('uid_' + 'a' * 31))
        # Correct total length but missing the 'uid_' prefix is rejected.
        self.assertFalse(user_id_migration.verify_user_id_correct('a' * 36))

    def test_replace_gae_ids(self):
        """get_user_ids_corresponding_to_gae_ids maps a batch of GAE IDs
        to user IDs, raising on already-migrated or unknown entries.
        """
        self.assertEqual(
            user_id_migration.get_user_ids_corresponding_to_gae_ids(
                [self.USER_1_GAE_ID, self.USER_2_GAE_ID, self.USER_3_GAE_ID]),
            [self.USER_1_USER_ID, self.USER_2_USER_ID, self.USER_3_USER_ID]
        )
        # An already-migrated user ID in the batch raises
        # AlreadyMigratedUserException.
        with self.assertRaises(user_id_migration.AlreadyMigratedUserException):
            user_id_migration.get_user_ids_corresponding_to_gae_ids(
                [self.USER_1_USER_ID, 'nonexistent_gae_id'])
        # A GAE ID with no settings model raises MissingUserException.
        with self.assertRaises(user_id_migration.MissingUserException):
            user_id_migration.get_user_ids_corresponding_to_gae_ids(
                [self.USER_1_GAE_ID, 'nonexistent_gae_id'])
        # The system committer ID passes through unchanged.
        self.assertEqual(
            user_id_migration.get_user_ids_corresponding_to_gae_ids(
                [self.USER_1_GAE_ID, feconf.SYSTEM_COMMITTER_ID]),
            [self.USER_1_USER_ID, feconf.SYSTEM_COMMITTER_ID]
        )

    def test_replace_gae_id(self):
        """get_user_id_corresponding_to_gae_id maps one GAE ID, with the
        same error behavior as the batch variant.
        """
        self.assertEqual(
            user_id_migration.get_user_id_corresponding_to_gae_id(
                self.USER_1_GAE_ID),
            self.USER_1_USER_ID
        )
        with self.assertRaises(user_id_migration.AlreadyMigratedUserException):
            user_id_migration.get_user_id_corresponding_to_gae_id(
                self.USER_1_USER_ID)
        with self.assertRaises(user_id_migration.MissingUserException):
            user_id_migration.get_user_id_corresponding_to_gae_id(
                'nonexistent_gae_id')
        # The system committer ID passes through unchanged.
        self.assertEqual(
            user_id_migration.get_user_id_corresponding_to_gae_id(
                feconf.SYSTEM_COMMITTER_ID),
            feconf.SYSTEM_COMMITTER_ID
        )

    def test_are_commit_cmds_role_change(self):
        """are_commit_cmds_role_change is True only for a single
        role-change or manager-removal commit command.
        """
        self.assertTrue(
            user_id_migration.are_commit_cmds_role_change([{
                'cmd': rights_manager.CMD_CHANGE_ROLE,
                'assignee_id': self.USER_1_GAE_ID,
                'old_role': rights_manager.ROLE_NONE,
                'new_role': rights_manager.ROLE_EDITOR
            }])
        )
        self.assertTrue(
            user_id_migration.are_commit_cmds_role_change([{
                'cmd': topic_domain.CMD_CHANGE_ROLE,
                'assignee_id': self.USER_1_GAE_ID,
                'old_role': rights_manager.ROLE_NONE,
                'new_role': rights_manager.ROLE_EDITOR
            }])
        )
        self.assertTrue(
            user_id_migration.are_commit_cmds_role_change([{
                'cmd': topic_domain.CMD_REMOVE_MANAGER_ROLE,
                'removed_user_id': self.USER_1_GAE_ID
            }])
        )
        # Two commands at once do not qualify, even if each would alone.
        self.assertFalse(
            user_id_migration.are_commit_cmds_role_change([
                {
                    'cmd': topic_domain.CMD_REMOVE_MANAGER_ROLE,
                    'removed_user_id': self.USER_1_GAE_ID
                },
                {
                    'cmd': topic_domain.CMD_REMOVE_MANAGER_ROLE,
                    'removed_user_id': self.USER_1_GAE_ID
                }
            ])
        )
        # A non-role-related command does not qualify.
        self.assertFalse(
            user_id_migration.are_commit_cmds_role_change([{
                'cmd': topic_domain.CMD_PUBLISH_TOPIC
            }])
        )
class CreateNewUsersMigrationJobTests(test_utils.GenericTestBase):
    """Tests for the CreateNewUsersMigrationJob one-off job, which
    re-keys UserSettingsModels from GAE IDs to new-style user IDs.
    """

    USER_A_ID = 'user_1_id'
    USER_A_EMAIL = 'a@example.com'
    USER_B_ID = 'user_2_id'
    USER_B_EMAIL = 'b@example.com'
    USER_C_ID = 'user_3_id'
    USER_C_EMAIL = 'c@example.com'

    def _run_one_off_job(self):
        """Runs the one-off MapReduce job and returns its parsed output
        as a list of [key, value] pairs.
        """
        job_id = user_id_migration.CreateNewUsersMigrationJob.create_new()
        user_id_migration.CreateNewUsersMigrationJob.enqueue(job_id)
        # Exactly one job instance should be queued before flushing.
        self.assertEqual(
            self.count_jobs_in_taskqueue(
                taskqueue_services.QUEUE_NAME_ONE_OFF_JOBS), 1)
        self.process_and_flush_pending_tasks()
        stringified_output = (
            user_id_migration.CreateNewUsersMigrationJob.get_output(job_id))
        return [ast.literal_eval(item) for item in stringified_output]

    def setUp(self):
        def empty(*_):
            """Function that takes any number of arguments and does nothing."""
            pass
        # We don't want to signup the superadmin user.
        with self.swap(test_utils.TestBase, 'signup_superadmin_user', empty):
            super(CreateNewUsersMigrationJobTests, self).setUp()

    def test_one_user_user_settings_model(self):
        """A single UserSettingsModel is re-keyed under a new user ID."""
        original_model = user_models.UserSettingsModel(
            id=self.USER_A_ID,
            gae_id=self.USER_A_ID,
            gae_user_id=self.USER_A_ID,
            email=self.USER_A_EMAIL,
        )
        original_model.put()
        output = self._run_one_off_job()
        self.assertEqual(output, [['SUCCESS', 1]])
        # The datastore key changed, so look the model up by gae_id.
        migrated_model = user_models.UserSettingsModel.query(
            user_models.UserSettingsModel.gae_id == self.USER_A_ID).get()
        self.assertNotEqual(original_model.id, migrated_model.id)
        self.assertEqual(original_model.gae_user_id, migrated_model.gae_user_id)
        self.assertEqual(original_model.email, migrated_model.email)
        self.assertEqual(original_model.created_on, migrated_model.created_on)
        self.assertEqual(
            original_model.last_updated, migrated_model.last_updated)
        # The model keyed by the old ID must be gone.
        self.assertIsNone(
            user_models.UserSettingsModel.get_by_id(self.USER_A_ID))

    def test_multiple_user_user_settings_model(self):
        """All existing UserSettingsModels are migrated in a single run."""
        original_models = {}
        original_models[self.USER_A_ID] = user_models.UserSettingsModel(
            id=self.USER_A_ID,
            gae_id=self.USER_A_ID,
            email=self.USER_A_EMAIL,
        )
        original_models[self.USER_A_ID].put()
        original_models[self.USER_B_ID] = user_models.UserSettingsModel(
            id=self.USER_B_ID,
            gae_id=self.USER_B_ID,
            email=self.USER_B_EMAIL,
        )
        original_models[self.USER_B_ID].put()
        original_models[self.USER_C_ID] = user_models.UserSettingsModel(
            id=self.USER_C_ID,
            gae_id=self.USER_C_ID,
            email=self.USER_C_EMAIL,
        )
        original_models[self.USER_C_ID].put()
        output = self._run_one_off_job()
        self.assertIn(['SUCCESS', 3], output)
        for user_id, original_model in original_models.items():
            migrated_model = user_models.UserSettingsModel.query(
                user_models.UserSettingsModel.gae_id == user_id).get()
            self.assertNotEqual(original_model.id, migrated_model.id)
            self.assertEqual(
                original_model.gae_user_id, migrated_model.gae_user_id)
            self.assertEqual(original_model.email, migrated_model.email)
            self.assertEqual(
                original_model.created_on, migrated_model.created_on)
            self.assertEqual(
                original_model.last_updated, migrated_model.last_updated)
            self.assertIsNone(
                user_models.UserSettingsModel.get_by_id(migrated_model.gae_id))

    def test_repeated_run(self):
        """A second run reports the user as already migrated."""
        original_model = user_models.UserSettingsModel(
            id=self.USER_A_ID,
            gae_id=self.USER_A_ID,
            gae_user_id=self.USER_A_ID,
            email=self.USER_A_EMAIL,
        )
        original_model.put()
        output = self._run_one_off_job()
        self.assertEqual(output, [['SUCCESS', 1]])
        migrated_model = user_models.UserSettingsModel.query(
            user_models.UserSettingsModel.gae_id == self.USER_A_ID).get()
        self.assertNotEqual(original_model.id, migrated_model.id)
        self.assertEqual(original_model.gae_user_id, migrated_model.gae_user_id)
        self.assertEqual(original_model.email, migrated_model.email)
        self.assertEqual(original_model.created_on, migrated_model.created_on)
        self.assertEqual(
            original_model.last_updated, migrated_model.last_updated)
        self.assertIsNone(
            user_models.UserSettingsModel.get_by_id(self.USER_A_ID))
        # Running the job again must be a no-op that is reported as such.
        output = self._run_one_off_job()
        self.assertEqual(output, [['SUCCESS_ALREADY_MIGRATED', 1]])
class UserIdMigrationJobTests(test_utils.GenericTestBase):
"""Tests for UserIdMigrationJobTests."""
USER_A_ID = 'user_0_id'
USER_A_GAE_ID = 'gae_0_id'
USER_A_EMAIL = 'a@example.com'
USER_A_USERNAME = 'a'
USER_B_ID = 'user_1_id'
USER_B_GAE_ID = 'gae_1_id'
USER_B_EMAIL = 'b@example.com'
USER_B_USERNAME = 'b'
USER_C_ID = 'user_2_id'
USER_C_GAE_ID = 'gae_2_id'
USER_C_EMAIL = 'c@example.com'
USER_C_USERNAME = 'c'
USER_D_ID = 'user_3_id'
USER_D_GAE_ID = 'gae_3_id'
USER_D_EMAIL = 'd@example.com'
USER_D_USERNAME = 'd'
def _run_one_off_job(self):
"""Runs the one-off MapReduce job."""
job_id = user_id_migration.UserIdMigrationJob.create_new()
user_id_migration.UserIdMigrationJob.enqueue(job_id)
self.assertEqual(
self.count_jobs_in_taskqueue(
taskqueue_services.QUEUE_NAME_ONE_OFF_JOBS), 1)
self.process_and_flush_pending_tasks()
stringified_output = (
user_id_migration.UserIdMigrationJob.get_output(job_id))
return [ast.literal_eval(item) for item in stringified_output]
    def setUp(self):
        """Create the settings model for USER_A, mapping its GAE ID."""
        def empty(*_):
            """Function that takes any number of arguments and does nothing."""
            pass
        # We don't want to signup the superadmin user.
        with self.swap(test_utils.TestBase, 'signup_superadmin_user', empty):
            super(UserIdMigrationJobTests, self).setUp()
        user_models.UserSettingsModel(
            id=self.USER_A_ID,
            gae_id=self.USER_A_GAE_ID,
            email=self.USER_A_EMAIL,
        ).put()
    def test_one_user_two_models_full_id(self):
        """Models keyed directly by the GAE ID are re-keyed to the new
        user ID, with all other fields (including timestamps) preserved.
        """
        original_model_1 = user_models.CompletedActivitiesModel(
            id=self.USER_A_GAE_ID,
            exploration_ids=['1', '2'],
            collection_ids=['1', '2'])
        original_model_1.put()
        original_model_2 = user_models.UserStatsModel(
            id=self.USER_A_GAE_ID,
            impact_score=1,
            average_ratings=1.1)
        original_model_2.put()
        output = self._run_one_off_job()
        # NOTE(review): 'SUCCESS_MISSING_OLD_MODEL' presumably counts the
        # model kinds that had no old entry for this user — confirm
        # against the job implementation.
        self.assertItemsEqual(
            output, [['SUCCESS', 1], ['SUCCESS_MISSING_OLD_MODEL', 10]])
        migrated_model_1 = (
            user_models.CompletedActivitiesModel.get_by_id(self.USER_A_ID))
        self.assertNotEqual(original_model_1.id, migrated_model_1.id)
        self.assertEqual(
            original_model_1.exploration_ids, migrated_model_1.exploration_ids)
        self.assertEqual(
            original_model_1.collection_ids, migrated_model_1.collection_ids)
        self.assertEqual(
            original_model_1.created_on, migrated_model_1.created_on)
        self.assertEqual(
            original_model_1.last_updated, migrated_model_1.last_updated)
        # The model under the old GAE-ID key must be gone.
        self.assertIsNone(
            user_models.CompletedActivitiesModel.get_by_id(self.USER_A_GAE_ID))
        migrated_model_2 = (
            user_models.UserStatsModel.get_by_id(self.USER_A_ID))
        self.assertNotEqual(original_model_2.id, migrated_model_2.id)
        self.assertEqual(
            original_model_2.impact_score, migrated_model_2.impact_score)
        self.assertEqual(
            original_model_2.average_ratings, migrated_model_2.average_ratings)
        self.assertEqual(
            original_model_2.created_on, migrated_model_2.created_on)
        self.assertEqual(
            original_model_2.last_updated, migrated_model_2.last_updated)
        self.assertIsNone(
            user_models.UserStatsModel.get_by_id(self.USER_A_GAE_ID))
    def test_multiple_users_one_model_full_id(self):
        """GAE-ID-keyed models for several users are all re-keyed."""
        user_models.UserSettingsModel(
            id=self.USER_B_ID,
            gae_id=self.USER_B_GAE_ID,
            email=self.USER_B_EMAIL,
        ).put()
        user_models.UserSettingsModel(
            id=self.USER_C_ID,
            gae_id=self.USER_C_GAE_ID,
            email=self.USER_C_EMAIL,
        ).put()
        original_models = []
        original_models.append(user_models.CompletedActivitiesModel(
            id=self.USER_A_GAE_ID,
            exploration_ids=['1', '2'],
            collection_ids=['11', '22']))
        original_models[-1].put()
        original_models.append(user_models.CompletedActivitiesModel(
            id=self.USER_B_GAE_ID,
            exploration_ids=['3', '4'],
            collection_ids=['33', '44']))
        original_models[-1].put()
        original_models.append(user_models.CompletedActivitiesModel(
            id=self.USER_C_GAE_ID,
            exploration_ids=['5', '6'],
            collection_ids=['55', '66']))
        original_models[-1].put()
        # Sort by id so the order matches (USER_A, USER_B, USER_C) below.
        original_models.sort(key=lambda model: model.id)
        output = self._run_one_off_job()
        self.assertItemsEqual(
            output, [['SUCCESS', 3], ['SUCCESS_MISSING_OLD_MODEL', 33]])
        for i, user_id in enumerate(
                (self.USER_A_ID, self.USER_B_ID, self.USER_C_ID)):
            migrated_model = (
                user_models.CompletedActivitiesModel.get_by_id(user_id))
            self.assertNotEqual(
                original_models[i].id, migrated_model.id)
            self.assertEqual(
                original_models[i].exploration_ids,
                migrated_model.exploration_ids)
            self.assertEqual(
                original_models[i].collection_ids,
                migrated_model.collection_ids)
            self.assertEqual(
                original_models[i].created_on, migrated_model.created_on)
            self.assertEqual(
                original_models[i].last_updated, migrated_model.last_updated)
        # The models under the old GAE-ID keys must be gone.
        self.assertIsNone(
            user_models.CompletedActivitiesModel.get_by_id(self.USER_A_GAE_ID))
        self.assertIsNone(
            user_models.CompletedActivitiesModel.get_by_id(self.USER_B_GAE_ID))
        self.assertIsNone(
            user_models.CompletedActivitiesModel.get_by_id(self.USER_C_GAE_ID))
    def test_one_user_one_model_part_id(self):
        """A model whose key embeds the user ID as a prefix
        ('<user_id>.<exploration_id>') is re-keyed and its user_id field
        rewritten; the other fields are preserved.
        """
        original_model = user_models.ExpUserLastPlaythroughModel(
            id='%s.%s' % (self.USER_A_GAE_ID, 'exp_id'),
            user_id=self.USER_A_GAE_ID,
            exploration_id='exp_id',
            last_played_exp_version=2,
            last_played_state_name='start')
        original_model.put()
        output = self._run_one_off_job()
        self.assertItemsEqual(
            output, [['SUCCESS', 1], ['SUCCESS_MISSING_OLD_MODEL', 12]])
        migrated_model = (
            user_models.ExpUserLastPlaythroughModel.get_by_id(
                '%s.%s' % (self.USER_A_ID, 'exp_id')))
        self.assertNotEqual(
            original_model.id, migrated_model.id)
        self.assertNotEqual(
            original_model.user_id, migrated_model.user_id)
        self.assertEqual(
            original_model.exploration_id, migrated_model.exploration_id)
        self.assertEqual(
            original_model.last_played_exp_version,
            migrated_model.last_played_exp_version)
        self.assertEqual(
            original_model.last_played_state_name,
            migrated_model.last_played_state_name)
        self.assertEqual(
            original_model.created_on, migrated_model.created_on)
        self.assertEqual(
            original_model.last_updated, migrated_model.last_updated)
        # The model under the old composite key must be gone.
        self.assertIsNone(
            user_models.ExpUserLastPlaythroughModel.get_by_id(
                '%s.%s' % (self.USER_A_GAE_ID, 'exp_id')))
    def test_one_user_different_one_model_part_id(self):
        """A model whose key embeds the user ID as a suffix
        ('<category>.<user_id>') is re-keyed and its user_id field
        rewritten; the other fields are preserved.
        """
        original_model = user_models.UserContributionScoringModel(
            id='%s.%s' % ('category', self.USER_A_GAE_ID),
            user_id=self.USER_A_GAE_ID,
            score_category='category',
            score=1.5,
            has_email_been_sent=False
        )
        original_model.put()
        output = self._run_one_off_job()
        self.assertItemsEqual(
            output, [['SUCCESS', 1], ['SUCCESS_MISSING_OLD_MODEL', 12]])
        migrated_model = (
            user_models.UserContributionScoringModel.get_by_id(
                '%s.%s' % ('category', self.USER_A_ID)))
        self.assertNotEqual(
            original_model.id, migrated_model.id)
        self.assertNotEqual(
            original_model.user_id, migrated_model.user_id)
        self.assertEqual(
            original_model.score_category, migrated_model.score_category)
        self.assertEqual(original_model.score, migrated_model.score)
        self.assertEqual(
            original_model.has_email_been_sent,
            migrated_model.has_email_been_sent)
        self.assertEqual(original_model.created_on, migrated_model.created_on)
        self.assertEqual(
            original_model.last_updated, migrated_model.last_updated)
        # The model under the old composite key must be gone.
        self.assertIsNone(user_models.UserContributionScoringModel.get_by_id(
            '%s.%s' % ('category', self.USER_A_GAE_ID)))
def test_multiple_users_one_model_part_id(self):
user_models.UserSettingsModel(
id=self.USER_B_ID,
gae_id=self.USER_B_GAE_ID,
email=self.USER_B_EMAIL,
).put()
user_models.UserSettingsModel(
id=self.USER_C_ID,
gae_id=self.USER_C_GAE_ID,
email=self.USER_C_EMAIL,
).put()
original_models = []
original_models.append(user_models.ExpUserLastPlaythroughModel(
id='%s.%s' % (self.USER_A_GAE_ID, 'exp_id'),
user_id=self.USER_A_GAE_ID,
exploration_id='exp_id',
last_played_exp_version=2,
last_played_state_name='start'))
original_models[-1].put()
original_models.append(user_models.ExpUserLastPlaythroughModel(
id='%s.%s' % (self.USER_B_GAE_ID, 'exp_id'),
user_id=self.USER_B_GAE_ID,
exploration_id='exp_id',
last_played_exp_version=3,
last_played_state_name='start'))
original_models[-1].put()
original_models.append(user_models.ExpUserLastPlaythroughModel(
id='%s.%s' % (self.USER_C_GAE_ID, 'exp_id'),
user_id=self.USER_C_GAE_ID,
exploration_id='exp_id',
last_played_exp_version=4,
last_played_state_name='start'))
original_models[-1].put()
original_models.sort(key=lambda model: model.user_id)
output = self._run_one_off_job()
self.assertItemsEqual(
output, [['SUCCESS', 3], ['SUCCESS_MISSING_OLD_MODEL', 36]])
for i, user_id in enumerate(
(self.USER_A_ID, self.USER_B_ID, self.USER_C_ID)):
migrated_model = (
user_models.ExpUserLastPlaythroughModel.get_by_id(
'%s.%s' % (user_id, 'exp_id')))
self.assertNotEqual(
original_models[i].id, migrated_model.id)
self.assertNotEqual(
original_models[i].user_id, migrated_model.user_id)
self.assertEqual(
original_models[i].exploration_id,
migrated_model.exploration_id)
self.assertEqual(
original_models[i].last_played_exp_version,
migrated_model.last_played_exp_version)
self.assertEqual(
original_models[i].last_played_state_name,
migrated_model.last_played_state_name)
self.assertEqual(
original_models[i].created_on, migrated_model.created_on)
self.assertEqual(
original_models[i].last_updated, migrated_model.last_updated)
self.assertIsNone(
user_models.CompletedActivitiesModel.get_by_id(
self.USER_A_GAE_ID))
self.assertIsNone(
user_models.CompletedActivitiesModel.get_by_id(
self.USER_B_GAE_ID))
self.assertIsNone(
user_models.CompletedActivitiesModel.get_by_id(
self.USER_C_GAE_ID))
    def test_multiple_users_different_one_model_part_id(self):
        """UserContributionScoringModels ('<category>.<user_id>' keys)
        for several users are re-keyed and their user_id fields rewritten.
        """
        user_models.UserSettingsModel(
            id=self.USER_B_ID,
            gae_id=self.USER_B_GAE_ID,
            email=self.USER_B_EMAIL,
        ).put()
        user_models.UserSettingsModel(
            id=self.USER_C_ID,
            gae_id=self.USER_C_GAE_ID,
            email=self.USER_C_EMAIL,
        ).put()
        original_models = []
        original_models.append(user_models.UserContributionScoringModel(
            id='%s.%s' % ('score_category', self.USER_A_GAE_ID),
            user_id=self.USER_A_GAE_ID,
            score_category='score_category',
            score=2,
            has_email_been_sent=False))
        original_models[-1].put()
        original_models.append(user_models.UserContributionScoringModel(
            id='%s.%s' % ('score_category', self.USER_B_GAE_ID),
            user_id=self.USER_B_GAE_ID,
            score_category='score_category',
            score=2,
            has_email_been_sent=False))
        original_models[-1].put()
        original_models.append(user_models.UserContributionScoringModel(
            id='%s.%s' % ('score_category', self.USER_C_GAE_ID),
            user_id=self.USER_C_GAE_ID,
            score_category='score_category',
            score=2,
            has_email_been_sent=False))
        original_models[-1].put()
        # Sort by user_id so the order matches (USER_A, USER_B, USER_C).
        original_models.sort(key=lambda model: model.user_id)
        output = self._run_one_off_job()
        self.assertItemsEqual(
            output, [['SUCCESS', 3], ['SUCCESS_MISSING_OLD_MODEL', 36]])
        for i, user_id in enumerate(
                (self.USER_A_ID, self.USER_B_ID, self.USER_C_ID)):
            migrated_model = (
                user_models.UserContributionScoringModel.get_by_id(
                    '%s.%s' % ('score_category', user_id)))
            self.assertNotEqual(
                original_models[i].id, migrated_model.id)
            self.assertNotEqual(
                original_models[i].user_id, migrated_model.user_id)
            self.assertEqual(
                original_models[i].score_category,
                migrated_model.score_category)
            self.assertEqual(original_models[i].score, migrated_model.score)
            self.assertEqual(
                original_models[i].has_email_been_sent,
                migrated_model.has_email_been_sent)
            self.assertEqual(
                original_models[i].created_on, migrated_model.created_on)
            self.assertEqual(
                original_models[i].last_updated, migrated_model.last_updated)
        # The models under the old composite keys must be gone.
        self.assertIsNone(user_models.UserContributionScoringModel.get_by_id(
            '%s.%s' % ('score_category', self.USER_A_GAE_ID)))
        self.assertIsNone(user_models.UserContributionScoringModel.get_by_id(
            '%s.%s' % ('score_category', self.USER_B_GAE_ID)))
        self.assertIsNone(user_models.UserContributionScoringModel.get_by_id(
            '%s.%s' % ('score_category', self.USER_C_GAE_ID)))
def test_one_user_one_model_user_id_field(self):
    """Checks that a single model referencing one user through a user ID
    field gets its committer_id migrated while every other field is
    left untouched.
    """
    old_model = exp_models.ExplorationSnapshotMetadataModel(
        id='instance_id',
        committer_id=self.USER_A_GAE_ID,
        commit_type='create',
        commit_message='commit message 2',
        commit_cmds=[{'cmd': 'some_command'}])
    old_model.put()

    output = self._run_one_off_job()
    self.assertItemsEqual(
        output, [['SUCCESS', 1], ['SUCCESS_MISSING_OLD_MODEL', 12]])

    new_model = exp_models.ExplorationSnapshotMetadataModel.query(
        exp_models.ExplorationSnapshotMetadataModel.committer_id ==
        self.USER_A_ID
    ).get()
    # The committer ID is the only field that should have changed.
    self.assertNotEqual(old_model.committer_id, new_model.committer_id)
    self.assertEqual(old_model.id, new_model.id)
    for field_name in (
            'commit_type', 'commit_message', 'commit_cmds',
            'created_on', 'last_updated'):
        self.assertEqual(
            getattr(old_model, field_name), getattr(new_model, field_name))
def test_multiple_users_one_model_user_id_field(self):
    """Checks that models belonging to several different users each get
    their committer_id field migrated to the new user ID, with all
    other fields preserved.
    """
    # Settings models for users B and C are needed so the job can map
    # their GAE IDs to new user IDs; user A's settings are presumably
    # created in setUp (not visible here) -- verify against setUp.
    user_models.UserSettingsModel(
        id=self.USER_B_ID,
        gae_id=self.USER_B_GAE_ID,
        email=self.USER_B_EMAIL,
    ).put()
    user_models.UserSettingsModel(
        id=self.USER_C_ID,
        gae_id=self.USER_C_GAE_ID,
        email=self.USER_C_EMAIL,
    ).put()
    original_models = []
    original_models.append(exp_models.ExplorationSnapshotMetadataModel(
        id='instance_id1',
        committer_id=self.USER_A_GAE_ID,
        commit_type='create',
        commit_message='commit message 2',
        commit_cmds=[{'cmd': 'some_command'}]))
    original_models[-1].put()
    original_models.append(exp_models.ExplorationSnapshotMetadataModel(
        id='instance_id2',
        committer_id=self.USER_B_GAE_ID,
        commit_type='create',
        commit_message='commit message 2',
        commit_cmds=[{'cmd': 'some_command'}]))
    original_models[-1].put()
    original_models.append(exp_models.ExplorationSnapshotMetadataModel(
        id='instance_id3',
        committer_id=self.USER_C_GAE_ID,
        commit_type='create',
        commit_message='commit message 2',
        commit_cmds=[{'cmd': 'some_command'}]))
    original_models[-1].put()
    # Sort by committer_id so list indices line up with the
    # (USER_A_ID, USER_B_ID, USER_C_ID) iteration order below; this
    # relies on the GAE ID constants sorting in A, B, C order.
    original_models.sort(key=lambda model: model.committer_id)
    output = self._run_one_off_job()
    self.assertItemsEqual(
        output, [['SUCCESS', 3], ['SUCCESS_MISSING_OLD_MODEL', 36]])
    for i, user_id in enumerate(
            (self.USER_A_ID, self.USER_B_ID, self.USER_C_ID)):
        migrated_model = (
            exp_models.ExplorationSnapshotMetadataModel.query(
                exp_models.ExplorationSnapshotMetadataModel.committer_id ==
                user_id
            ).get())
        # Only the committer_id should differ after migration.
        self.assertNotEqual(
            original_models[i].committer_id, migrated_model.committer_id)
        self.assertEqual(original_models[i].id, migrated_model.id)
        self.assertEqual(
            original_models[i].commit_type, migrated_model.commit_type)
        self.assertEqual(
            original_models[i].commit_message,
            migrated_model.commit_message)
        self.assertEqual(
            original_models[i].commit_cmds, migrated_model.commit_cmds)
        self.assertEqual(
            original_models[i].created_on, migrated_model.created_on)
        self.assertEqual(
            original_models[i].last_updated, migrated_model.last_updated)
def test_idempotent_copy_model_with_new_id_not_migrated(self):
    """Checks that a repeat run of the job correctly migrates models
    that were created after the first run: the GAE-ID-keyed copies are
    deleted and user-ID-keyed copies are created.
    """
    # First run only processes whatever models setUp created.
    output = self._run_one_off_job()
    self.assertItemsEqual(
        output, [['SUCCESS', 1], ['SUCCESS_MISSING_OLD_MODEL', 12]])

    # Add two activity models still keyed by the old GAE ID.
    for activities_model_class in (
            user_models.IncompleteActivitiesModel,
            user_models.CompletedActivitiesModel):
        activities_model_class(
            id=self.USER_A_GAE_ID,
            exploration_ids=['1', '2'],
            collection_ids=['1', '2']
        ).put()

    output = self._run_one_off_job()
    self.assertItemsEqual(
        output, [['SUCCESS', 1], ['SUCCESS_MISSING_OLD_MODEL', 10]])

    # The old GAE-keyed copies must be gone ...
    for activities_model_class in (
            user_models.CompletedActivitiesModel,
            user_models.IncompleteActivitiesModel):
        self.assertIsNone(
            activities_model_class.get_by_id(self.USER_A_GAE_ID))
    # ... and new user-ID-keyed copies must exist.
    for activities_model_class in (
            user_models.CompletedActivitiesModel,
            user_models.IncompleteActivitiesModel):
        self.assertIsNotNone(
            activities_model_class.get_by_id(self.USER_A_ID))
def test_idempotent_copy_model_with_new_id_half_migrated(self):
    """Checks that the job is idempotent when one of two GAE-ID-keyed
    models was already migrated by an earlier run.
    """
    user_models.IncompleteActivitiesModel(
        id=self.USER_A_GAE_ID,
        exploration_ids=['1', '2'],
        collection_ids=['1', '2']
    ).put()
    output = self._run_one_off_job()
    self.assertItemsEqual(
        output, [['SUCCESS', 1], ['SUCCESS_MISSING_OLD_MODEL', 11]])
    # Second model is created only after the first run.
    user_models.CompletedActivitiesModel(
        id=self.USER_A_GAE_ID,
        exploration_ids=['1', '2'],
        collection_ids=['1', '2']
    ).put()
    output = self._run_one_off_job()
    # The model handled by the first run is reported as already
    # migrated; the new one is migrated normally.
    self.assertItemsEqual(
        output, [
            ['SUCCESS', 1],
            ['SUCCESS_ALREADY_MIGRATED', 1],
            ['SUCCESS_MISSING_OLD_MODEL', 10]
        ])
    # Old GAE-keyed copies are gone; user-ID-keyed copies exist.
    self.assertIsNone(
        user_models.CompletedActivitiesModel.get_by_id(self.USER_A_GAE_ID))
    self.assertIsNone(
        user_models.IncompleteActivitiesModel.get_by_id(self.USER_A_GAE_ID))
    self.assertIsNotNone(
        user_models.CompletedActivitiesModel.get_by_id(self.USER_A_ID))
    self.assertIsNotNone(
        user_models.IncompleteActivitiesModel.get_by_id(self.USER_A_ID))
def test_idempotent_copy_model_with_new_id_all_migrated(self):
    """Checks that re-running the job after all GAE-ID-keyed models were
    migrated is a no-op that reports SUCCESS_ALREADY_MIGRATED.
    """
    user_models.IncompleteActivitiesModel(
        id=self.USER_A_GAE_ID,
        exploration_ids=['1', '2'],
        collection_ids=['1', '2']
    ).put()
    user_models.CompletedActivitiesModel(
        id=self.USER_A_GAE_ID,
        exploration_ids=['1', '2'],
        collection_ids=['1', '2']
    ).put()
    output = self._run_one_off_job()
    self.assertItemsEqual(
        output, [['SUCCESS', 1], ['SUCCESS_MISSING_OLD_MODEL', 10]])
    # Second run: both activity models are already migrated.
    output = self._run_one_off_job()
    self.assertItemsEqual(
        output, [
            ['SUCCESS', 1],
            ['SUCCESS_ALREADY_MIGRATED', 2],
            ['SUCCESS_MISSING_OLD_MODEL', 10]
        ])
    # Old GAE-keyed copies are gone; user-ID-keyed copies exist.
    self.assertIsNone(
        user_models.CompletedActivitiesModel.get_by_id(self.USER_A_GAE_ID))
    self.assertIsNone(
        user_models.IncompleteActivitiesModel.get_by_id(self.USER_A_GAE_ID))
    self.assertIsNotNone(
        user_models.CompletedActivitiesModel.get_by_id(self.USER_A_ID))
    self.assertIsNotNone(
        user_models.IncompleteActivitiesModel.get_by_id(self.USER_A_ID))
def test_idempotent_copy_model_with_new_id_and_user_id_not_migrated(self):
    """Checks that a repeat run migrates models whose ID *and* user_id
    field both embed the old GAE ID, when they were created after the
    first run.
    """
    output = self._run_one_off_job()
    self.assertItemsEqual(
        output, [['SUCCESS', 1], ['SUCCESS_MISSING_OLD_MODEL', 12]])
    # Model IDs embed the GAE ID as '<user id>.<exploration id>'.
    exp_play_model_id_1 = '%s.%s' % (self.USER_A_GAE_ID, 'exp_1_id')
    user_models.ExpUserLastPlaythroughModel(
        id=exp_play_model_id_1,
        user_id=self.USER_A_GAE_ID,
        exploration_id='exp_id',
        last_played_exp_version=2,
        last_played_state_name='start'
    ).put()
    exp_play_model_id_2 = '%s.%s' % (self.USER_A_GAE_ID, 'exp_2_id')
    user_models.ExpUserLastPlaythroughModel(
        id=exp_play_model_id_2,
        user_id=self.USER_A_GAE_ID,
        exploration_id='exp_id',
        last_played_exp_version=2,
        last_played_state_name='start'
    ).put()
    output = self._run_one_off_job()
    self.assertItemsEqual(
        output, [['SUCCESS', 1], ['SUCCESS_MISSING_OLD_MODEL', 12]])
    # Old GAE-ID-keyed models must be deleted ...
    self.assertIsNone(
        user_models.ExpUserLastPlaythroughModel.get_by_id(
            exp_play_model_id_1))
    self.assertIsNone(
        user_models.ExpUserLastPlaythroughModel.get_by_id(
            exp_play_model_id_2))
    # ... and replaced by models whose IDs embed the new user ID.
    self.assertIsNotNone(
        user_models.ExpUserLastPlaythroughModel.get_by_id(
            exp_play_model_id_1.replace(
                self.USER_A_GAE_ID, self.USER_A_ID)))
    self.assertIsNotNone(
        user_models.ExpUserLastPlaythroughModel.get_by_id(
            exp_play_model_id_2.replace(
                self.USER_A_GAE_ID, self.USER_A_ID)))
def test_idempotent_copy_model_with_new_id_and_user_id_half_migrated(self):
    """Checks idempotency when one model with a GAE-ID-based key and
    user_id field was already migrated before the second one appears.
    """
    # Model IDs embed the GAE ID as '<user id>.<exploration id>'.
    exp_play_model_id_1 = '%s.%s' % (self.USER_A_GAE_ID, 'exp_1_id')
    user_models.ExpUserLastPlaythroughModel(
        id=exp_play_model_id_1,
        user_id=self.USER_A_GAE_ID,
        exploration_id='exp_id',
        last_played_exp_version=2,
        last_played_state_name='start'
    ).put()
    output = self._run_one_off_job()
    self.assertItemsEqual(
        output, [['SUCCESS', 1], ['SUCCESS_MISSING_OLD_MODEL', 12]])
    # Second model is created only after the first run.
    exp_play_model_id_2 = '%s.%s' % (self.USER_A_GAE_ID, 'exp_2_id')
    user_models.ExpUserLastPlaythroughModel(
        id=exp_play_model_id_2,
        user_id=self.USER_A_GAE_ID,
        exploration_id='exp_id',
        last_played_exp_version=2,
        last_played_state_name='start'
    ).put()
    output = self._run_one_off_job()
    self.assertItemsEqual(
        output, [['SUCCESS', 1], ['SUCCESS_MISSING_OLD_MODEL', 12]])
    # Old GAE-ID-keyed models must be deleted ...
    self.assertIsNone(
        user_models.ExpUserLastPlaythroughModel.get_by_id(
            exp_play_model_id_1))
    self.assertIsNone(
        user_models.ExpUserLastPlaythroughModel.get_by_id(
            exp_play_model_id_2))
    # ... and replaced by models whose IDs embed the new user ID.
    self.assertIsNotNone(
        user_models.ExpUserLastPlaythroughModel.get_by_id(
            exp_play_model_id_1.replace(
                self.USER_A_GAE_ID, self.USER_A_ID)))
    self.assertIsNotNone(
        user_models.ExpUserLastPlaythroughModel.get_by_id(
            exp_play_model_id_2.replace(
                self.USER_A_GAE_ID, self.USER_A_ID)))
def test_idempotent_copy_model_with_new_id_and_user_id_all_migrated(self):
    """Checks that re-running the job after both models with
    GAE-ID-based keys and user_id fields were migrated leaves the
    datastore and job output unchanged.
    """
    # Model IDs embed the GAE ID as '<user id>.<exploration id>'.
    exp_play_model_id_1 = '%s.%s' % (self.USER_A_GAE_ID, 'exp_1_id')
    user_models.ExpUserLastPlaythroughModel(
        id=exp_play_model_id_1,
        user_id=self.USER_A_GAE_ID,
        exploration_id='exp_id',
        last_played_exp_version=2,
        last_played_state_name='start'
    ).put()
    exp_play_model_id_2 = '%s.%s' % (self.USER_A_GAE_ID, 'exp_2_id')
    user_models.ExpUserLastPlaythroughModel(
        id=exp_play_model_id_2,
        user_id=self.USER_A_GAE_ID,
        exploration_id='exp_id',
        last_played_exp_version=2,
        last_played_state_name='start'
    ).put()
    output = self._run_one_off_job()
    self.assertItemsEqual(
        output, [['SUCCESS', 1], ['SUCCESS_MISSING_OLD_MODEL', 12]])
    # Second run produces the same output as the first.
    output = self._run_one_off_job()
    self.assertItemsEqual(
        output, [['SUCCESS', 1], ['SUCCESS_MISSING_OLD_MODEL', 12]])
    # Old GAE-ID-keyed models must be deleted ...
    self.assertIsNone(
        user_models.ExpUserLastPlaythroughModel.get_by_id(
            exp_play_model_id_1))
    self.assertIsNone(
        user_models.ExpUserLastPlaythroughModel.get_by_id(
            exp_play_model_id_2))
    # ... and replaced by models whose IDs embed the new user ID.
    self.assertIsNotNone(
        user_models.ExpUserLastPlaythroughModel.get_by_id(
            exp_play_model_id_1.replace(
                self.USER_A_GAE_ID, self.USER_A_ID)))
    self.assertIsNotNone(
        user_models.ExpUserLastPlaythroughModel.get_by_id(
            exp_play_model_id_2.replace(
                self.USER_A_GAE_ID, self.USER_A_ID)))
def test_idempotent_change_model_with_one_user_id_field_not_migrated(self):
    """Checks that a repeat run migrates the committer_id field of
    models created after the first run.
    """
    output = self._run_one_off_job()
    self.assertItemsEqual(
        output, [['SUCCESS', 1], ['SUCCESS_MISSING_OLD_MODEL', 12]])
    # Both models are created after the first run, still referencing
    # the old GAE ID.
    exp_models.ExplorationSnapshotMetadataModel(
        id='instance_1_id',
        committer_id=self.USER_A_GAE_ID,
        commit_type='create',
        commit_message='commit message 2',
        commit_cmds=[{'cmd': 'some_command'}]
    ).put()
    exp_models.ExplorationSnapshotMetadataModel(
        id='instance_2_id',
        committer_id=self.USER_A_GAE_ID,
        commit_type='create',
        commit_message='commit message 2',
        commit_cmds=[{'cmd': 'some_command'}]
    ).put()
    output = self._run_one_off_job()
    self.assertItemsEqual(
        output, [['SUCCESS', 1], ['SUCCESS_MISSING_OLD_MODEL', 12]])
    # Both models now reference the new user ID.
    self.assertEqual(
        exp_models.ExplorationSnapshotMetadataModel.get_by_id(
            'instance_1_id').committer_id, self.USER_A_ID)
    self.assertEqual(
        exp_models.ExplorationSnapshotMetadataModel.get_by_id(
            'instance_2_id').committer_id, self.USER_A_ID)
def test_idempotent_change_model_with_one_user_id_field_half_migrated(self):
    """Checks idempotency when one model's committer_id was already
    migrated before a second un-migrated model appears.
    """
    exp_models.ExplorationSnapshotMetadataModel(
        id='instance_1_id',
        committer_id=self.USER_A_GAE_ID,
        commit_type='create',
        commit_message='commit message 2',
        commit_cmds=[{'cmd': 'some_command'}]
    ).put()
    output = self._run_one_off_job()
    self.assertItemsEqual(
        output, [['SUCCESS', 1], ['SUCCESS_MISSING_OLD_MODEL', 12]])
    # Second model is created only after the first run.
    exp_models.ExplorationSnapshotMetadataModel(
        id='instance_2_id',
        committer_id=self.USER_A_GAE_ID,
        commit_type='create',
        commit_message='commit message 2',
        commit_cmds=[{'cmd': 'some_command'}]
    ).put()
    output = self._run_one_off_job()
    self.assertItemsEqual(
        output, [['SUCCESS', 1], ['SUCCESS_MISSING_OLD_MODEL', 12]])
    # Both models now reference the new user ID.
    self.assertEqual(
        exp_models.ExplorationSnapshotMetadataModel.get_by_id(
            'instance_1_id').committer_id, self.USER_A_ID)
    self.assertEqual(
        exp_models.ExplorationSnapshotMetadataModel.get_by_id(
            'instance_2_id').committer_id, self.USER_A_ID)
def test_idempotent_change_model_with_one_user_id_field_all_migrated(self):
    """Checks that a second run after all committer_id fields were
    migrated is a no-op with identical job output.
    """
    # Create two snapshot metadata models still referencing the old
    # GAE ID.
    for instance_id in ('instance_1_id', 'instance_2_id'):
        exp_models.ExplorationSnapshotMetadataModel(
            id=instance_id,
            committer_id=self.USER_A_GAE_ID,
            commit_type='create',
            commit_message='commit message 2',
            commit_cmds=[{'cmd': 'some_command'}]
        ).put()

    # Both runs report the same output; the second run has nothing
    # left to change.
    expected_output = [['SUCCESS', 1], ['SUCCESS_MISSING_OLD_MODEL', 12]]
    output = self._run_one_off_job()
    self.assertItemsEqual(output, expected_output)
    output = self._run_one_off_job()
    self.assertItemsEqual(output, expected_output)

    # Both models now reference the new user ID.
    for instance_id in ('instance_1_id', 'instance_2_id'):
        self.assertEqual(
            exp_models.ExplorationSnapshotMetadataModel.get_by_id(
                instance_id).committer_id,
            self.USER_A_ID)
class SnapshotsContentUserIdMigrationJobTests(test_utils.GenericTestBase):
    """Tests for SnapshotsContentUserIdMigrationJob, which replaces old
    GAE user IDs with new user IDs inside the content of rights snapshot
    models.
    """

    SNAPSHOT_ID = '2'
    USER_1_USER_ID = 'uid_aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
    USER_1_GAE_ID = 'gae_id_1'
    USER_2_USER_ID = 'uid_bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb'
    USER_2_GAE_ID = 'gae_id_2'
    USER_3_USER_ID = 'uid_cccccccccccccccccccccccccccccccc'
    USER_3_GAE_ID = 'gae_id_3'
    # A GAE ID for which no UserSettingsModel exists; used to trigger
    # FAILURE output from the job.
    WRONG_GAE_ID = 'wrong_id'

    def _run_one_off_job(self):
        """Runs the one-off MapReduce job.

        Returns:
            list. The job output, with each stringified output item
            parsed back into a Python value via ast.literal_eval.
        """
        job_id = (
            user_id_migration.SnapshotsContentUserIdMigrationJob.create_new())
        user_id_migration.SnapshotsContentUserIdMigrationJob.enqueue(job_id)
        self.assertEqual(
            self.count_jobs_in_taskqueue(
                taskqueue_services.QUEUE_NAME_ONE_OFF_JOBS), 1)
        self.process_and_flush_pending_tasks()
        stringified_output = (
            user_id_migration.SnapshotsContentUserIdMigrationJob.get_output(
                job_id))
        eval_output = [ast.literal_eval(stringified_item) for
                       stringified_item in stringified_output]
        return eval_output

    def setUp(self):
        """Creates settings models mapping the three test users' GAE IDs
        to their new user IDs, without signing up the superadmin user.
        """
        def empty(*_):
            """Function that takes any number of arguments and does nothing."""
            pass

        # We don't want to signup the superadmin user.
        with self.swap(test_utils.TestBase, 'signup_superadmin_user', empty):
            super(SnapshotsContentUserIdMigrationJobTests, self).setUp()
        user_models.UserSettingsModel(
            id=self.USER_1_USER_ID,
            gae_id=self.USER_1_GAE_ID,
            email='some@email.com',
            role=feconf.ROLE_ID_COLLECTION_EDITOR
        ).put()
        user_models.UserSettingsModel(
            id=self.USER_2_USER_ID,
            gae_id=self.USER_2_GAE_ID,
            email='some.different@email.com',
            role=feconf.ROLE_ID_COLLECTION_EDITOR
        ).put()
        user_models.UserSettingsModel(
            id=self.USER_3_USER_ID,
            gae_id=self.USER_3_GAE_ID,
            email='some.different@email.cz',
            role=feconf.ROLE_ID_COLLECTION_EDITOR
        ).put()

    def test_migrate_collection_rights_snapshot_model(self):
        """Checks that GAE IDs inside a collection rights snapshot's
        content are replaced with new user IDs, leaving system IDs and
        last_updated untouched.
        """
        original_rights_model = collection_models.CollectionRightsModel(
            id=self.SNAPSHOT_ID,
            owner_ids=[self.USER_1_GAE_ID, self.USER_2_GAE_ID],
            editor_ids=[self.USER_1_GAE_ID, feconf.SYSTEM_COMMITTER_ID],
            voice_artist_ids=[self.USER_1_GAE_ID, self.USER_2_GAE_ID],
            viewer_ids=[self.USER_1_GAE_ID, self.USER_3_GAE_ID],
            community_owned=False,
            status=constants.ACTIVITY_STATUS_PUBLIC,
            viewable_if_private=False,
            first_published_msec=0.0
        )
        original_rights_snapshot_model = (
            collection_models.CollectionRightsSnapshotContentModel(
                id=self.SNAPSHOT_ID,
                content=original_rights_model.to_dict()))
        original_rights_snapshot_model.put()
        output = self._run_one_off_job()
        self.assertEqual(
            output[0], [u'SUCCESS - CollectionRightsSnapshotContentModel', 1])
        migrated_rights_snapshot_model = (
            collection_models.CollectionRightsSnapshotContentModel.get_by_id(
                self.SNAPSHOT_ID))
        # The migration must not bump last_updated.
        self.assertEqual(
            original_rights_snapshot_model.last_updated,
            migrated_rights_snapshot_model.last_updated)
        migrated_rights_model = collection_models.CollectionRightsModel(
            **migrated_rights_snapshot_model.content)
        self.assertEqual(
            [self.USER_1_USER_ID, self.USER_2_USER_ID],
            migrated_rights_model.owner_ids)
        # SYSTEM_COMMITTER_ID is not a GAE ID and must be left as-is.
        self.assertEqual(
            [self.USER_1_USER_ID, feconf.SYSTEM_COMMITTER_ID],
            migrated_rights_model.editor_ids)
        self.assertEqual(
            [self.USER_1_USER_ID, self.USER_2_USER_ID],
            migrated_rights_model.voice_artist_ids)
        self.assertEqual(
            [self.USER_1_USER_ID, self.USER_3_USER_ID],
            migrated_rights_model.viewer_ids)

    def test_migrate_collection_rights_snapshot_model_wrong_id(self):
        """Checks that a GAE ID with no matching settings model is
        reported as a failure for collection rights snapshots.
        """
        original_rights_model = collection_models.CollectionRightsModel(
            id=self.SNAPSHOT_ID,
            owner_ids=[self.WRONG_GAE_ID],
            editor_ids=[self.WRONG_GAE_ID],
            voice_artist_ids=[self.WRONG_GAE_ID],
            viewer_ids=[self.WRONG_GAE_ID],
            community_owned=False,
            status=constants.ACTIVITY_STATUS_PUBLIC,
            viewable_if_private=False,
            first_published_msec=0.0
        )
        original_rights_snapshot_model = (
            collection_models.CollectionRightsSnapshotContentModel(
                id=self.SNAPSHOT_ID,
                content=original_rights_model.to_dict()))
        original_rights_snapshot_model.put()
        output = self._run_one_off_job()
        self.assertIn(
            ['FAILURE - CollectionRightsSnapshotContentModel',
             [self.WRONG_GAE_ID]],
            output)

    def test_migrate_exp_rights_snapshot_model(self):
        """Checks that GAE IDs inside an exploration rights snapshot's
        content are replaced with new user IDs.
        """
        original_rights_model = exp_models.ExplorationRightsModel(
            id=self.SNAPSHOT_ID,
            owner_ids=[self.USER_1_GAE_ID, self.USER_2_GAE_ID],
            editor_ids=[self.USER_1_GAE_ID, feconf.SYSTEM_COMMITTER_ID],
            voice_artist_ids=[self.USER_1_GAE_ID, self.USER_2_GAE_ID],
            viewer_ids=[self.USER_1_GAE_ID, self.USER_3_GAE_ID],
            community_owned=False,
            status=constants.ACTIVITY_STATUS_PUBLIC,
            viewable_if_private=False,
            first_published_msec=0.0)
        original_rights_snapshot_model = (
            exp_models.ExplorationRightsSnapshotContentModel(
                id=self.SNAPSHOT_ID,
                content=original_rights_model.to_dict()))
        original_rights_snapshot_model.put()
        output = self._run_one_off_job()
        self.assertEqual(
            output[0],
            [u'SUCCESS - ExplorationRightsSnapshotContentModel', 1])
        migrated_rights_snapshot_model = (
            exp_models.ExplorationRightsSnapshotContentModel.get_by_id(
                self.SNAPSHOT_ID))
        # The migration must not bump last_updated.
        self.assertEqual(
            original_rights_snapshot_model.last_updated,
            migrated_rights_snapshot_model.last_updated)
        migrated_rights_model = exp_models.ExplorationRightsModel(
            **migrated_rights_snapshot_model.content)
        self.assertEqual(
            [self.USER_1_USER_ID, self.USER_2_USER_ID],
            migrated_rights_model.owner_ids)
        # SYSTEM_COMMITTER_ID is not a GAE ID and must be left as-is.
        self.assertEqual(
            [self.USER_1_USER_ID, feconf.SYSTEM_COMMITTER_ID],
            migrated_rights_model.editor_ids)
        self.assertEqual(
            [self.USER_1_USER_ID, self.USER_2_USER_ID],
            migrated_rights_model.voice_artist_ids)
        self.assertEqual(
            [self.USER_1_USER_ID, self.USER_3_USER_ID],
            migrated_rights_model.viewer_ids)

    def test_migrate_exp_rights_snapshot_model_wrong_id(self):
        """Checks that a GAE ID with no matching settings model is
        reported as a failure for exploration rights snapshots.
        """
        original_rights_model = exp_models.ExplorationRightsModel(
            id=self.SNAPSHOT_ID,
            owner_ids=[self.WRONG_GAE_ID],
            editor_ids=[self.WRONG_GAE_ID],
            voice_artist_ids=[self.WRONG_GAE_ID],
            viewer_ids=[self.WRONG_GAE_ID],
            community_owned=False,
            status=constants.ACTIVITY_STATUS_PUBLIC,
            viewable_if_private=False,
            first_published_msec=0.0
        )
        original_rights_snapshot_model = (
            exp_models.ExplorationRightsSnapshotContentModel(
                id=self.SNAPSHOT_ID,
                content=original_rights_model.to_dict()))
        original_rights_snapshot_model.put()
        output = self._run_one_off_job()
        self.assertIn(
            ['FAILURE - ExplorationRightsSnapshotContentModel',
             [self.WRONG_GAE_ID]],
            output)

    def test_migrate_exp_rights_snapshot_model_admin(self):
        """Checks that the legacy 'all_viewer_ids' field is removed from
        snapshot content during migration.
        """
        original_rights_model = exp_models.ExplorationRightsModel(
            id=self.SNAPSHOT_ID,
            owner_ids=[self.USER_1_GAE_ID, self.USER_2_GAE_ID],
            editor_ids=[self.USER_1_GAE_ID, feconf.SYSTEM_COMMITTER_ID],
            voice_artist_ids=[self.USER_1_GAE_ID, self.USER_2_GAE_ID],
            viewer_ids=[self.USER_1_GAE_ID, self.USER_3_GAE_ID],
            community_owned=False,
            status=constants.ACTIVITY_STATUS_PUBLIC,
            viewable_if_private=False,
            first_published_msec=0.0)
        original_rights_snapshot_model = (
            exp_models.ExplorationRightsSnapshotContentModel(
                id=self.SNAPSHOT_ID,
                content=original_rights_model.to_dict()))
        # Inject a legacy field that no longer exists on the model class.
        original_rights_snapshot_model.content['all_viewer_ids'] = ['id1']
        original_rights_snapshot_model.put()
        output = self._run_one_off_job()
        self.assertEqual(
            output[0],
            [u'SUCCESS - ExplorationRightsSnapshotContentModel', 1])
        migrated_rights_snapshot_model = (
            exp_models.ExplorationRightsSnapshotContentModel.get_by_id(
                self.SNAPSHOT_ID))
        # The migration must not bump last_updated.
        self.assertEqual(
            original_rights_snapshot_model.last_updated,
            migrated_rights_snapshot_model.last_updated)
        # The legacy field must be dropped by the job.
        self.assertNotIn(
            'all_viewer_ids', migrated_rights_snapshot_model.content)
        migrated_rights_model = exp_models.ExplorationRightsModel(
            **migrated_rights_snapshot_model.content)
        self.assertEqual(
            [self.USER_1_USER_ID, self.USER_2_USER_ID],
            migrated_rights_model.owner_ids)
        self.assertEqual(
            [self.USER_1_USER_ID, feconf.SYSTEM_COMMITTER_ID],
            migrated_rights_model.editor_ids)
        self.assertEqual(
            [self.USER_1_USER_ID, self.USER_2_USER_ID],
            migrated_rights_model.voice_artist_ids)
        self.assertEqual(
            [self.USER_1_USER_ID, self.USER_3_USER_ID],
            migrated_rights_model.viewer_ids)

    def test_migrate_exp_rights_snapshot_model_migration_bot(self):
        """Checks that the migration bot's ID is preserved (not treated
        as a GAE ID) while real user IDs are migrated.
        """
        original_rights_model = exp_models.ExplorationRightsModel(
            id=self.SNAPSHOT_ID,
            owner_ids=[self.USER_1_GAE_ID, self.USER_2_GAE_ID],
            editor_ids=[self.USER_1_GAE_ID, feconf.MIGRATION_BOT_USER_ID],
            voice_artist_ids=[self.USER_1_GAE_ID, self.USER_2_GAE_ID],
            viewer_ids=[self.USER_1_GAE_ID, self.USER_3_GAE_ID],
            community_owned=False,
            status=constants.ACTIVITY_STATUS_PUBLIC,
            viewable_if_private=False,
            first_published_msec=0.0)
        original_rights_snapshot_model = (
            exp_models.ExplorationRightsSnapshotContentModel(
                id=self.SNAPSHOT_ID,
                content=original_rights_model.to_dict()))
        # Inject a legacy field that no longer exists on the model class.
        original_rights_snapshot_model.content['all_viewer_ids'] = ['id1']
        original_rights_snapshot_model.put()
        output = self._run_one_off_job()
        self.assertEqual(
            output[0],
            [u'SUCCESS - ExplorationRightsSnapshotContentModel', 1])
        migrated_rights_snapshot_model = (
            exp_models.ExplorationRightsSnapshotContentModel.get_by_id(
                self.SNAPSHOT_ID))
        # The migration must not bump last_updated.
        self.assertEqual(
            original_rights_snapshot_model.last_updated,
            migrated_rights_snapshot_model.last_updated)
        # The legacy field must be dropped by the job.
        self.assertNotIn(
            'all_viewer_ids', migrated_rights_snapshot_model.content)
        migrated_rights_model = exp_models.ExplorationRightsModel(
            **migrated_rights_snapshot_model.content)
        self.assertEqual(
            [self.USER_1_USER_ID, self.USER_2_USER_ID],
            migrated_rights_model.owner_ids)
        # MIGRATION_BOT_USER_ID is a system ID and must be left as-is.
        self.assertEqual(
            [self.USER_1_USER_ID, feconf.MIGRATION_BOT_USER_ID],
            migrated_rights_model.editor_ids)
        self.assertEqual(
            [self.USER_1_USER_ID, self.USER_2_USER_ID],
            migrated_rights_model.voice_artist_ids)
        self.assertEqual(
            [self.USER_1_USER_ID, self.USER_3_USER_ID],
            migrated_rights_model.viewer_ids)

    def test_migrate_topic_rights_snapshot_model(self):
        """Checks that GAE IDs inside a topic rights snapshot's content
        are replaced with new user IDs, keeping system IDs.
        """
        original_rights_model = topic_models.TopicRightsModel(
            manager_ids=[self.USER_1_GAE_ID, self.USER_2_GAE_ID,
                         feconf.SYSTEM_COMMITTER_ID])
        original_rights_snapshot_model = (
            topic_models.TopicRightsSnapshotContentModel(
                id=self.SNAPSHOT_ID,
                content=original_rights_model.to_dict()))
        original_rights_snapshot_model.put()
        output = self._run_one_off_job()
        self.assertEqual(
            output[0], [u'SUCCESS - TopicRightsSnapshotContentModel', 1])
        migrated_rights_snapshot_model = (
            topic_models.TopicRightsSnapshotContentModel.get_by_id(
                self.SNAPSHOT_ID))
        # The migration must not bump last_updated.
        self.assertEqual(
            original_rights_snapshot_model.last_updated,
            migrated_rights_snapshot_model.last_updated)
        migrated_rights_model = topic_models.TopicRightsModel(
            **migrated_rights_snapshot_model.content)
        self.assertEqual(
            [self.USER_1_USER_ID, self.USER_2_USER_ID,
             feconf.SYSTEM_COMMITTER_ID],
            migrated_rights_model.manager_ids)

    def test_migrate_topic_rights_snapshot_model_wrong_id(self):
        """Checks that a GAE ID with no matching settings model is
        reported as a failure for topic rights snapshots.
        """
        original_rights_model = topic_models.TopicRightsModel(
            manager_ids=[self.WRONG_GAE_ID])
        original_rights_snapshot_model = (
            topic_models.TopicRightsSnapshotContentModel(
                id=self.SNAPSHOT_ID,
                content=original_rights_model.to_dict()))
        original_rights_snapshot_model.put()
        output = self._run_one_off_job()
        self.assertIn(
            ['FAILURE - TopicRightsSnapshotContentModel',
             [self.WRONG_GAE_ID]],
            output)

    def test_idempotent_change_model_not_migrated(self):
        """Checks that a repeat run migrates snapshot content models that
        were re-created with old GAE IDs after the first run.
        """
        original_rights_model_1 = collection_models.CollectionRightsModel(
            id='instance_1_id',
            owner_ids=[self.USER_1_GAE_ID, self.USER_2_GAE_ID],
            editor_ids=[self.USER_1_GAE_ID, feconf.SYSTEM_COMMITTER_ID],
            voice_artist_ids=[self.USER_1_GAE_ID, self.USER_2_GAE_ID],
            viewer_ids=[self.USER_1_GAE_ID, self.USER_3_GAE_ID],
            community_owned=False,
            status=constants.ACTIVITY_STATUS_PUBLIC,
            viewable_if_private=False,
            first_published_msec=0.0)
        collection_models.CollectionRightsSnapshotContentModel(
            id='instance_1_id',
            content=original_rights_model_1.to_dict()).put()
        original_rights_model_2 = collection_models.CollectionRightsModel(
            id='instance_2_id',
            owner_ids=[self.USER_1_GAE_ID, self.USER_2_GAE_ID],
            editor_ids=[self.USER_1_GAE_ID, feconf.SYSTEM_COMMITTER_ID],
            voice_artist_ids=[self.USER_1_GAE_ID, self.USER_2_GAE_ID],
            viewer_ids=[self.USER_1_GAE_ID, self.USER_3_GAE_ID],
            community_owned=False,
            status=constants.ACTIVITY_STATUS_PUBLIC,
            viewable_if_private=False,
            first_published_msec=0.0)
        collection_models.CollectionRightsSnapshotContentModel(
            id='instance_2_id',
            content=original_rights_model_2.to_dict()).put()
        output = self._run_one_off_job()
        self.assertItemsEqual(
            output, [['SUCCESS - CollectionRightsSnapshotContentModel', 2]])
        # Overwrite both snapshots with un-migrated content again, as if
        # they were created between runs.
        original_rights_model_1 = collection_models.CollectionRightsModel(
            id='instance_1_id',
            owner_ids=[self.USER_1_GAE_ID, self.USER_2_GAE_ID],
            editor_ids=[self.USER_1_GAE_ID, feconf.SYSTEM_COMMITTER_ID],
            voice_artist_ids=[self.USER_1_GAE_ID, self.USER_2_GAE_ID],
            viewer_ids=[self.USER_1_GAE_ID, self.USER_3_GAE_ID],
            community_owned=False,
            status=constants.ACTIVITY_STATUS_PUBLIC,
            viewable_if_private=False,
            first_published_msec=0.0)
        collection_models.CollectionRightsSnapshotContentModel(
            id='instance_1_id',
            content=original_rights_model_1.to_dict()).put()
        original_rights_model_2 = collection_models.CollectionRightsModel(
            id='instance_2_id',
            owner_ids=[self.USER_1_GAE_ID, self.USER_2_GAE_ID],
            editor_ids=[self.USER_1_GAE_ID, feconf.SYSTEM_COMMITTER_ID],
            voice_artist_ids=[self.USER_1_GAE_ID, self.USER_2_GAE_ID],
            viewer_ids=[self.USER_1_GAE_ID, self.USER_3_GAE_ID],
            community_owned=False,
            status=constants.ACTIVITY_STATUS_PUBLIC,
            viewable_if_private=False,
            first_published_msec=0.0)
        collection_models.CollectionRightsSnapshotContentModel(
            id='instance_2_id',
            content=original_rights_model_2.to_dict()).put()
        output = self._run_one_off_job()
        self.assertItemsEqual(
            output, [['SUCCESS - CollectionRightsSnapshotContentModel', 2]])
        migrated_rights_snapshot_model_1 = (
            collection_models.CollectionRightsSnapshotContentModel.get_by_id(
                'instance_1_id'))
        migrated_rights_model_1 = collection_models.CollectionRightsModel(
            **migrated_rights_snapshot_model_1.content)
        self.assertEqual(
            [self.USER_1_USER_ID, self.USER_2_USER_ID],
            migrated_rights_model_1.owner_ids)
        self.assertEqual(
            [self.USER_1_USER_ID, feconf.SYSTEM_COMMITTER_ID],
            migrated_rights_model_1.editor_ids)
        self.assertEqual(
            [self.USER_1_USER_ID, self.USER_2_USER_ID],
            migrated_rights_model_1.voice_artist_ids)
        self.assertEqual(
            [self.USER_1_USER_ID, self.USER_3_USER_ID],
            migrated_rights_model_1.viewer_ids)
        migrated_rights_snapshot_model_2 = (
            collection_models.CollectionRightsSnapshotContentModel.get_by_id(
                'instance_2_id'))
        migrated_rights_model_2 = collection_models.CollectionRightsModel(
            **migrated_rights_snapshot_model_2.content)
        self.assertEqual(
            [self.USER_1_USER_ID, self.USER_2_USER_ID],
            migrated_rights_model_2.owner_ids)
        self.assertEqual(
            [self.USER_1_USER_ID, feconf.SYSTEM_COMMITTER_ID],
            migrated_rights_model_2.editor_ids)
        self.assertEqual(
            [self.USER_1_USER_ID, self.USER_2_USER_ID],
            migrated_rights_model_2.voice_artist_ids)
        self.assertEqual(
            [self.USER_1_USER_ID, self.USER_3_USER_ID],
            migrated_rights_model_2.viewer_ids)

    def test_idempotent_change_model_half_migrated(self):
        """Checks idempotency when one of two snapshot content models is
        re-created with old GAE IDs between runs: the untouched one is
        reported as already migrated.
        """
        original_rights_model_1 = collection_models.CollectionRightsModel(
            id='instance_1_id',
            owner_ids=[self.USER_1_GAE_ID, self.USER_2_GAE_ID],
            editor_ids=[self.USER_1_GAE_ID, feconf.SYSTEM_COMMITTER_ID],
            voice_artist_ids=[self.USER_1_GAE_ID, self.USER_2_GAE_ID],
            viewer_ids=[self.USER_1_GAE_ID, self.USER_3_GAE_ID],
            community_owned=False,
            status=constants.ACTIVITY_STATUS_PUBLIC,
            viewable_if_private=False,
            first_published_msec=0.0)
        collection_models.CollectionRightsSnapshotContentModel(
            id='instance_1_id',
            content=original_rights_model_1.to_dict()).put()
        original_rights_model_2 = collection_models.CollectionRightsModel(
            id='instance_2_id',
            owner_ids=[self.USER_1_GAE_ID, self.USER_2_GAE_ID],
            editor_ids=[self.USER_1_GAE_ID, feconf.SYSTEM_COMMITTER_ID],
            voice_artist_ids=[self.USER_1_GAE_ID, self.USER_2_GAE_ID],
            viewer_ids=[self.USER_1_GAE_ID, self.USER_3_GAE_ID],
            community_owned=False,
            status=constants.ACTIVITY_STATUS_PUBLIC,
            viewable_if_private=False,
            first_published_msec=0.0)
        collection_models.CollectionRightsSnapshotContentModel(
            id='instance_2_id',
            content=original_rights_model_2.to_dict()).put()
        output = self._run_one_off_job()
        self.assertItemsEqual(
            output, [['SUCCESS - CollectionRightsSnapshotContentModel', 2]])
        # Only the second snapshot is overwritten with un-migrated
        # content before the second run.
        original_rights_model_2 = collection_models.CollectionRightsModel(
            id='instance_2_id',
            owner_ids=[self.USER_1_GAE_ID, self.USER_2_GAE_ID],
            editor_ids=[self.USER_1_GAE_ID, feconf.SYSTEM_COMMITTER_ID],
            voice_artist_ids=[self.USER_1_GAE_ID, self.USER_2_GAE_ID],
            viewer_ids=[self.USER_1_GAE_ID, self.USER_3_GAE_ID],
            community_owned=False,
            status=constants.ACTIVITY_STATUS_PUBLIC,
            viewable_if_private=False,
            first_published_msec=0.0)
        collection_models.CollectionRightsSnapshotContentModel(
            id='instance_2_id',
            content=original_rights_model_2.to_dict()).put()
        output = self._run_one_off_job()
        self.assertItemsEqual(
            output,
            [['SUCCESS - CollectionRightsSnapshotContentModel', 1],
             ['SUCCESS_ALREADY_MIGRATED - CollectionRightsSnapshotContentModel',
              1]])
        migrated_rights_snapshot_model_1 = (
            collection_models.CollectionRightsSnapshotContentModel.get_by_id(
                'instance_1_id'))
        migrated_rights_model_1 = collection_models.CollectionRightsModel(
            **migrated_rights_snapshot_model_1.content)
        self.assertEqual(
            [self.USER_1_USER_ID, self.USER_2_USER_ID],
            migrated_rights_model_1.owner_ids)
        self.assertEqual(
            [self.USER_1_USER_ID, feconf.SYSTEM_COMMITTER_ID],
            migrated_rights_model_1.editor_ids)
        self.assertEqual(
            [self.USER_1_USER_ID, self.USER_2_USER_ID],
            migrated_rights_model_1.voice_artist_ids)
        self.assertEqual(
            [self.USER_1_USER_ID, self.USER_3_USER_ID],
            migrated_rights_model_1.viewer_ids)
        migrated_rights_snapshot_model_2 = (
            collection_models.CollectionRightsSnapshotContentModel.get_by_id(
                'instance_2_id'))
        migrated_rights_model_2 = collection_models.CollectionRightsModel(
            **migrated_rights_snapshot_model_2.content)
        self.assertEqual(
            [self.USER_1_USER_ID, self.USER_2_USER_ID],
            migrated_rights_model_2.owner_ids)
        self.assertEqual(
            [self.USER_1_USER_ID, feconf.SYSTEM_COMMITTER_ID],
            migrated_rights_model_2.editor_ids)
        self.assertEqual(
            [self.USER_1_USER_ID, self.USER_2_USER_ID],
            migrated_rights_model_2.voice_artist_ids)
        self.assertEqual(
            [self.USER_1_USER_ID, self.USER_3_USER_ID],
            migrated_rights_model_2.viewer_ids)

    def test_idempotent_change_model_all_migrated(self):
        """Checks that a second run after all snapshot content models are
        migrated only reports SUCCESS_ALREADY_MIGRATED and leaves the
        content unchanged.
        """
        original_rights_model_1 = collection_models.CollectionRightsModel(
            id='instance_1_id',
            owner_ids=[self.USER_1_GAE_ID, self.USER_2_GAE_ID],
            editor_ids=[self.USER_1_GAE_ID, feconf.SYSTEM_COMMITTER_ID],
            voice_artist_ids=[self.USER_1_GAE_ID, self.USER_2_GAE_ID],
            viewer_ids=[self.USER_1_GAE_ID, self.USER_3_GAE_ID],
            community_owned=False,
            status=constants.ACTIVITY_STATUS_PUBLIC,
            viewable_if_private=False,
            first_published_msec=0.0)
        collection_models.CollectionRightsSnapshotContentModel(
            id='instance_1_id',
            content=original_rights_model_1.to_dict()).put()
        original_rights_model_2 = collection_models.CollectionRightsModel(
            id='instance_2_id',
            owner_ids=[self.USER_1_GAE_ID, self.USER_2_GAE_ID],
            editor_ids=[self.USER_1_GAE_ID, feconf.SYSTEM_COMMITTER_ID],
            voice_artist_ids=[self.USER_1_GAE_ID, self.USER_2_GAE_ID],
            viewer_ids=[self.USER_1_GAE_ID, self.USER_3_GAE_ID],
            community_owned=False,
            status=constants.ACTIVITY_STATUS_PUBLIC,
            viewable_if_private=False,
            first_published_msec=0.0)
        collection_models.CollectionRightsSnapshotContentModel(
            id='instance_2_id',
            content=original_rights_model_2.to_dict()).put()
        output = self._run_one_off_job()
        self.assertItemsEqual(
            output, [['SUCCESS - CollectionRightsSnapshotContentModel', 2]])
        output = self._run_one_off_job()
        self.assertItemsEqual(
            output,
            [['SUCCESS_ALREADY_MIGRATED - CollectionRightsSnapshotContentModel',
              2]])
        migrated_rights_snapshot_model_1 = (
            collection_models.CollectionRightsSnapshotContentModel.get_by_id(
                'instance_1_id'))
        migrated_rights_model_1 = collection_models.CollectionRightsModel(
            **migrated_rights_snapshot_model_1.content)
        self.assertEqual(
            [self.USER_1_USER_ID, self.USER_2_USER_ID],
            migrated_rights_model_1.owner_ids)
        self.assertEqual(
            [self.USER_1_USER_ID, feconf.SYSTEM_COMMITTER_ID],
            migrated_rights_model_1.editor_ids)
        self.assertEqual(
            [self.USER_1_USER_ID, self.USER_2_USER_ID],
            migrated_rights_model_1.voice_artist_ids)
        self.assertEqual(
            [self.USER_1_USER_ID, self.USER_3_USER_ID],
            migrated_rights_model_1.viewer_ids)
        migrated_rights_snapshot_model_2 = (
            collection_models.CollectionRightsSnapshotContentModel.get_by_id(
                'instance_2_id'))
        migrated_rights_model_2 = collection_models.CollectionRightsModel(
            **migrated_rights_snapshot_model_2.content)
        self.assertEqual(
            [self.USER_1_USER_ID, self.USER_2_USER_ID],
            migrated_rights_model_2.owner_ids)
        self.assertEqual(
            [self.USER_1_USER_ID, feconf.SYSTEM_COMMITTER_ID],
            migrated_rights_model_2.editor_ids)
        self.assertEqual(
            [self.USER_1_USER_ID, self.USER_2_USER_ID],
            migrated_rights_model_2.voice_artist_ids)
        self.assertEqual(
            [self.USER_1_USER_ID, self.USER_3_USER_ID],
            migrated_rights_model_2.viewer_ids)
class SnapshotsMetadataUserIdMigrationJobTests(test_utils.GenericTestBase):
    """Tests for SnapshotsMetadataUserIdMigrationJob."""

    USER_1_USER_ID = 'uid_aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
    USER_1_GAE_ID = 'gae_id_1'
    WRONG_GAE_ID = 'wrong_id'

    def _run_one_off_job(self):
        """Runs the one-off MapReduce job and waits for it to finish.

        Returns:
            list(list(*)). The evaluated output of the one-off job.
        """
        job_id = (
            user_id_migration.SnapshotsMetadataUserIdMigrationJob.create_new())
        user_id_migration.SnapshotsMetadataUserIdMigrationJob.enqueue(job_id)
        self.assertEqual(
            self.count_jobs_in_taskqueue(
                taskqueue_services.QUEUE_NAME_ONE_OFF_JOBS), 1)
        self.process_and_flush_pending_tasks()
        stringified_output = (
            user_id_migration.SnapshotsMetadataUserIdMigrationJob.get_output(
                job_id))
        eval_output = [ast.literal_eval(stringified_item) for
                       stringified_item in stringified_output]
        return eval_output

    def setUp(self):
        def empty(*_):
            """Function that takes any number of arguments and does nothing."""
            pass
        # We don't want to signup the superadmin user.
        with self.swap(test_utils.TestBase, 'signup_superadmin_user', empty):
            super(SnapshotsMetadataUserIdMigrationJobTests, self).setUp()
        user_models.UserSettingsModel(
            id=self.USER_1_USER_ID,
            gae_id=self.USER_1_GAE_ID,
            email='some@email.com',
            role=feconf.ROLE_ID_COLLECTION_EDITOR
        ).put()

    def _create_collection_rights_models(
            self, snapshot_id, assignee_id, create_commit_log=True):
        """Creates a CollectionRightsSnapshotMetadataModel with a single
        CMD_CHANGE_ROLE commit cmd and, optionally, the corresponding
        CollectionCommitLogEntryModel.

        Args:
            snapshot_id: str. The ID of the snapshot metadata model; the
                commit log entry model (if created) gets the ID
                'rights-<snapshot_id>'.
            assignee_id: str. The user ID used in the commit cmd's
                'assignee_id' field.
            create_commit_log: bool. Whether to also create the commit log
                entry model.
        """
        collection_models.CollectionRightsSnapshotMetadataModel(
            id=snapshot_id,
            committer_id=self.USER_1_GAE_ID,
            commit_type='edit',
            commit_message='commit message 2',
            commit_cmds=[{
                'cmd': rights_manager.CMD_CHANGE_ROLE,
                'assignee_id': assignee_id,
                'old_role': rights_manager.ROLE_NONE,
                'new_role': rights_manager.ROLE_EDITOR}]
        ).put()
        if create_commit_log:
            collection_models.CollectionCommitLogEntryModel(
                id='rights-%s' % snapshot_id,
                user_id=self.USER_1_GAE_ID,
                username='user',
                collection_id='col_1_id',
                commit_type='edit',
                commit_message='commit message 2',
                commit_cmds=[{
                    'cmd': rights_manager.CMD_CHANGE_ROLE,
                    'assignee_id': assignee_id,
                    'old_role': rights_manager.ROLE_NONE,
                    'new_role': rights_manager.ROLE_EDITOR}],
                version=1,
                post_commit_status='public'
            ).put()

    def _create_topic_rights_models(
            self, snapshot_id, commit_cmds, create_commit_log=True):
        """Creates a TopicRightsSnapshotMetadataModel with the given commit
        cmds and, optionally, the corresponding TopicCommitLogEntryModel.

        Args:
            snapshot_id: str. The ID of the snapshot metadata model; the
                commit log entry model (if created) gets the ID
                'rights-<snapshot_id>'.
            commit_cmds: list(dict). The commit cmds to store on both models.
            create_commit_log: bool. Whether to also create the commit log
                entry model.
        """
        topic_models.TopicRightsSnapshotMetadataModel(
            id=snapshot_id,
            committer_id=self.USER_1_GAE_ID,
            commit_type='edit',
            commit_message='commit message 2',
            commit_cmds=commit_cmds
        ).put()
        if create_commit_log:
            topic_models.TopicCommitLogEntryModel(
                id='rights-%s' % snapshot_id,
                user_id=self.USER_1_GAE_ID,
                username='user',
                topic_id='top_1_id',
                commit_type='edit',
                commit_message='commit message 2',
                commit_cmds=commit_cmds,
                version=1,
                post_commit_status='public'
            ).put()

    def _assert_collection_assignee_id_migrated(
            self, snapshot_id, expected_assignee_id, check_commit_log=True):
        """Checks that the 'assignee_id' in the commit cmds of the collection
        rights snapshot metadata model (and optionally the corresponding
        commit log entry model) equals the expected value.

        Args:
            snapshot_id: str. The ID of the snapshot metadata model.
            expected_assignee_id: str. The expected 'assignee_id' value.
            check_commit_log: bool. Whether to also check the commit log
                entry model with the ID 'rights-<snapshot_id>'.
        """
        rights_snapshot_model = (
            collection_models.CollectionRightsSnapshotMetadataModel.get_by_id(
                snapshot_id))
        self.assertEqual(
            rights_snapshot_model.commit_cmds[0]['assignee_id'],
            expected_assignee_id)
        if check_commit_log:
            rights_commit_model = (
                collection_models.CollectionCommitLogEntryModel.get_by_id(
                    'rights-%s' % snapshot_id))
            self.assertEqual(
                rights_commit_model.commit_cmds[0]['assignee_id'],
                expected_assignee_id)

    def test_migrate_collection_rights_snapshot_model(self):
        """A GAE ID in the commit cmds is migrated to the new user ID in
        both the snapshot metadata and the commit log entry models.
        """
        self._create_collection_rights_models('col_1_id-1', self.USER_1_GAE_ID)
        output = self._run_one_off_job()
        self.assertEqual(
            output, [['SUCCESS - CollectionRightsSnapshotMetadataModel', 1]])
        self._assert_collection_assignee_id_migrated(
            'col_1_id-1', self.USER_1_USER_ID)

    def test_migrate_collection_rights_snapshot_model_admin_id(self):
        """The system committer ID in the commit cmds is left unchanged."""
        self._create_collection_rights_models(
            'col_1_id-1', feconf.SYSTEM_COMMITTER_ID)
        output = self._run_one_off_job()
        self.assertEqual(
            output, [['SUCCESS - CollectionRightsSnapshotMetadataModel', 1]])
        self._assert_collection_assignee_id_migrated(
            'col_1_id-1', feconf.SYSTEM_COMMITTER_ID)

    def test_migrate_collection_rights_snapshot_model_bot_id(self):
        """The migration bot ID in the commit cmds is left unchanged."""
        self._create_collection_rights_models(
            'col_1_id-1', feconf.MIGRATION_BOT_USER_ID)
        output = self._run_one_off_job()
        self.assertEqual(
            output, [['SUCCESS - CollectionRightsSnapshotMetadataModel', 1]])
        self._assert_collection_assignee_id_migrated(
            'col_1_id-1', feconf.MIGRATION_BOT_USER_ID)

    def test_migrate_collection_rights_snapshot_model_missing_commit(self):
        """A snapshot without a matching commit log entry is still migrated,
        but reported under SUCCESS_MISSING_COMMIT.
        """
        self._create_collection_rights_models(
            'col_1_id-1', self.USER_1_GAE_ID, create_commit_log=False)
        output = self._run_one_off_job()
        self.assertEqual(
            output, [
                ['SUCCESS_MISSING_COMMIT - '
                 'CollectionRightsSnapshotMetadataModel',
                 ['col_1_id-1']]])
        self._assert_collection_assignee_id_migrated(
            'col_1_id-1', self.USER_1_USER_ID, check_commit_log=False)

    def test_migrate_collection_rights_snapshot_model_wrong_id(self):
        """A GAE ID without a matching UserSettingsModel is reported as a
        failure.
        """
        self._create_collection_rights_models('col_1_id-1', self.WRONG_GAE_ID)
        output = self._run_one_off_job()
        self.assertEqual(
            output,
            [['FAILURE - CollectionRightsSnapshotMetadataModel',
              [self.WRONG_GAE_ID]]])

    def test_migrate_exploration_rights_snapshot_model(self):
        """Exploration rights snapshot and commit log models are migrated
        the same way as the collection ones.
        """
        exp_models.ExplorationRightsSnapshotMetadataModel(
            id='exp_1_id-1',
            committer_id=self.USER_1_GAE_ID,
            commit_type='edit',
            commit_message='commit message 2',
            commit_cmds=[{
                'cmd': rights_manager.CMD_CHANGE_ROLE,
                'assignee_id': self.USER_1_GAE_ID,
                'old_role': rights_manager.ROLE_NONE,
                'new_role': rights_manager.ROLE_EDITOR}]
        ).put()
        exp_models.ExplorationCommitLogEntryModel(
            id='rights-exp_1_id-1',
            user_id=self.USER_1_GAE_ID,
            username='user',
            exploration_id='exp_1_id',
            commit_type='edit',
            commit_message='commit message 2',
            commit_cmds=[{
                'cmd': rights_manager.CMD_CHANGE_ROLE,
                'assignee_id': self.USER_1_GAE_ID,
                'old_role': rights_manager.ROLE_NONE,
                'new_role': rights_manager.ROLE_EDITOR}],
            version=1,
            post_commit_status='public'
        ).put()
        output = self._run_one_off_job()
        self.assertEqual(
            output, [['SUCCESS - ExplorationRightsSnapshotMetadataModel', 1]])
        rights_snapshot_model = (
            exp_models.ExplorationRightsSnapshotMetadataModel.get_by_id(
                'exp_1_id-1'))
        self.assertEqual(
            rights_snapshot_model.commit_cmds[0]['assignee_id'],
            self.USER_1_USER_ID)
        rights_commit_model = (
            exp_models.ExplorationCommitLogEntryModel.get_by_id(
                'rights-exp_1_id-1'))
        self.assertEqual(
            rights_commit_model.commit_cmds[0]['assignee_id'],
            self.USER_1_USER_ID)

    def test_migrate_topic_rights_snapshot_model_change_role(self):
        """The 'assignee_id' of a topic CMD_CHANGE_ROLE commit cmd is
        migrated in both models.
        """
        self._create_topic_rights_models(
            'top_1_id-1',
            [{
                'cmd': topic_domain.CMD_CHANGE_ROLE,
                'assignee_id': self.USER_1_GAE_ID,
                'old_role': rights_manager.ROLE_NONE,
                'new_role': rights_manager.ROLE_EDITOR}])
        output = self._run_one_off_job()
        self.assertEqual(
            output, [['SUCCESS - TopicRightsSnapshotMetadataModel', 1]])
        rights_snapshot_model = (
            topic_models.TopicRightsSnapshotMetadataModel.get_by_id(
                'top_1_id-1'))
        self.assertEqual(
            rights_snapshot_model.commit_cmds[0]['assignee_id'],
            self.USER_1_USER_ID)
        rights_commit_model = (
            topic_models.TopicCommitLogEntryModel.get_by_id(
                'rights-top_1_id-1'))
        self.assertEqual(
            rights_commit_model.commit_cmds[0]['assignee_id'],
            self.USER_1_USER_ID)

    def test_migrate_topic_rights_snapshot_model_remove_manager_role(self):
        """The 'removed_user_id' of a topic CMD_REMOVE_MANAGER_ROLE commit
        cmd is migrated in both models.
        """
        self._create_topic_rights_models(
            'top_1_id-1',
            [{
                'cmd': topic_domain.CMD_REMOVE_MANAGER_ROLE,
                'removed_user_id': self.USER_1_GAE_ID}])
        output = self._run_one_off_job()
        self.assertEqual(
            output, [['SUCCESS - TopicRightsSnapshotMetadataModel', 1]])
        rights_snapshot_model = (
            topic_models.TopicRightsSnapshotMetadataModel.get_by_id(
                'top_1_id-1'))
        self.assertEqual(
            rights_snapshot_model.commit_cmds[0]['removed_user_id'],
            self.USER_1_USER_ID)
        rights_commit_model = (
            topic_models.TopicCommitLogEntryModel.get_by_id(
                'rights-top_1_id-1'))
        self.assertEqual(
            rights_commit_model.commit_cmds[0]['removed_user_id'],
            self.USER_1_USER_ID)

    def test_migrate_topic_rights_snapshot_model_missing_commit(self):
        """A topic snapshot without a matching commit log entry is still
        migrated, but reported under SUCCESS_MISSING_COMMIT.
        """
        self._create_topic_rights_models(
            'top_1_id-1',
            [{
                'cmd': topic_domain.CMD_REMOVE_MANAGER_ROLE,
                'removed_user_id': self.USER_1_GAE_ID}],
            create_commit_log=False)
        output = self._run_one_off_job()
        self.assertEqual(
            output, [
                ['SUCCESS_MISSING_COMMIT - '
                 'TopicRightsSnapshotMetadataModel',
                 ['top_1_id-1']]])
        rights_snapshot_model = (
            topic_models.TopicRightsSnapshotMetadataModel.get_by_id(
                'top_1_id-1'))
        self.assertEqual(
            rights_snapshot_model.commit_cmds[0]['removed_user_id'],
            self.USER_1_USER_ID)

    def test_idempotent_change_model_not_migrated(self):
        """Re-running the job after the models were reset to their
        unmigrated state migrates them again and reports SUCCESS.
        """
        self._create_collection_rights_models('col_1_id-1', self.USER_1_GAE_ID)
        self._create_collection_rights_models('col_2_id-1', self.USER_1_GAE_ID)
        output = self._run_one_off_job()
        self.assertItemsEqual(
            output, [['SUCCESS - CollectionRightsSnapshotMetadataModel', 2]])
        # Overwrite both model pairs with the original GAE IDs again, so the
        # second run sees fully unmigrated models.
        self._create_collection_rights_models('col_1_id-1', self.USER_1_GAE_ID)
        self._create_collection_rights_models('col_2_id-1', self.USER_1_GAE_ID)
        output = self._run_one_off_job()
        self.assertItemsEqual(
            output, [['SUCCESS - CollectionRightsSnapshotMetadataModel', 2]])
        self._assert_collection_assignee_id_migrated(
            'col_1_id-1', self.USER_1_USER_ID)
        self._assert_collection_assignee_id_migrated(
            'col_2_id-1', self.USER_1_USER_ID)

    def test_idempotent_change_model_half_migrated(self):
        """Re-running the job when only some models were reset migrates the
        reset ones and reports the rest as already migrated.
        """
        self._create_collection_rights_models('col_1_id-1', self.USER_1_GAE_ID)
        self._create_collection_rights_models('col_2_id-1', self.USER_1_GAE_ID)
        output = self._run_one_off_job()
        self.assertItemsEqual(
            output, [['SUCCESS - CollectionRightsSnapshotMetadataModel', 2]])
        # Reset only the first model pair to the unmigrated state.
        self._create_collection_rights_models('col_1_id-1', self.USER_1_GAE_ID)
        output = self._run_one_off_job()
        self.assertItemsEqual(
            output,
            [['SUCCESS - CollectionRightsSnapshotMetadataModel', 1],
             ['SUCCESS_ALREADY_MIGRATED - '
              'CollectionRightsSnapshotMetadataModel', 1]])
        self._assert_collection_assignee_id_migrated(
            'col_1_id-1', self.USER_1_USER_ID)
        self._assert_collection_assignee_id_migrated(
            'col_2_id-1', self.USER_1_USER_ID)

    def test_idempotent_change_model_all_migrated(self):
        """Re-running the job on fully migrated models changes nothing and
        reports all of them as already migrated.
        """
        self._create_collection_rights_models('col_1_id-1', self.USER_1_GAE_ID)
        self._create_collection_rights_models('col_2_id-1', self.USER_1_GAE_ID)
        output = self._run_one_off_job()
        self.assertItemsEqual(
            output, [['SUCCESS - CollectionRightsSnapshotMetadataModel', 2]])
        output = self._run_one_off_job()
        self.assertItemsEqual(
            output,
            [['SUCCESS_ALREADY_MIGRATED - '
              'CollectionRightsSnapshotMetadataModel', 2]])
        self._assert_collection_assignee_id_migrated(
            'col_1_id-1', self.USER_1_USER_ID)
        self._assert_collection_assignee_id_migrated(
            'col_2_id-1', self.USER_1_USER_ID)
class GaeIdNotInModelsVerificationJobTests(test_utils.GenericTestBase):
    """Tests for GaeIdNotInModelsVerificationJob."""
    USER_1_USER_ID = 'uid_aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
    USER_1_GAE_ID = 'gae_id_1'
    USER_2_USER_ID = 'uid_bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb'
    USER_2_GAE_ID = 'gae_id_2'
    USER_3_USER_ID = 'uid_cccccccccccccccccccccccccccccccc'
    USER_3_GAE_ID = 'gae_id_3'
    def _run_one_off_job(self):
        """Runs the one-off MapReduce job.

        Returns:
            list(list(*)). The evaluated output of the one-off job. Entries
            whose second element is an int are kept as-is; for other entries
            each stringified item in the second element is evaluated back
            into a Python value (the tests below expect tuples).
        """
        job_id = user_id_migration.GaeIdNotInModelsVerificationJob.create_new()
        user_id_migration.GaeIdNotInModelsVerificationJob.enqueue(job_id)
        self.assertEqual(
            self.count_jobs_in_taskqueue(
                taskqueue_services.QUEUE_NAME_ONE_OFF_JOBS), 1)
        self.process_and_flush_pending_tasks()
        stringified_output = (
            user_id_migration.GaeIdNotInModelsVerificationJob.get_output(
                job_id))
        eval_output = []
        for stringified_item in stringified_output:
            items = ast.literal_eval(stringified_item)
            if isinstance(items[1], int):
                # Output of the form [message, count].
                eval_output.append([items[0], items[1]])
            else:
                # Output of the form [message, list of stringified tuples]:
                # evaluate each item so the tests can assert on real tuples.
                user_ids = [ast.literal_eval(item) for item in items[1]]
                eval_output.append([items[0], user_ids])
        return eval_output
    def setUp(self):
        def empty(*_):
            """Function that takes any number of arguments and does nothing."""
            pass
        # We don't want to signup the superadmin user.
        with self.swap(test_utils.TestBase, 'signup_superadmin_user', empty):
            super(GaeIdNotInModelsVerificationJobTests, self).setUp()
        # Three valid settings models whose IDs follow the 'uid_' + 32
        # lowercase characters pattern used by the class constants above.
        user_models.UserSettingsModel(
            id=self.USER_1_USER_ID,
            gae_id=self.USER_1_GAE_ID,
            email='some@email.com',
            role=feconf.ROLE_ID_COLLECTION_EDITOR
        ).put()
        user_models.UserSettingsModel(
            id=self.USER_2_USER_ID,
            gae_id=self.USER_2_GAE_ID,
            email='some.different@email.com',
            role=feconf.ROLE_ID_COLLECTION_EDITOR
        ).put()
        user_models.UserSettingsModel(
            id=self.USER_3_USER_ID,
            gae_id=self.USER_3_GAE_ID,
            email='some.different@email.cz',
            role=feconf.ROLE_ID_COLLECTION_EDITOR
        ).put()
    def test_wrong_user_ids(self):
        """Settings models with malformed IDs (too short, or uppercase) are
        reported as (gae_id, model_id) tuples under
        'FAILURE - WRONG ID FORMAT'.
        """
        user_models.UserSettingsModel(
            id='aa',
            gae_id=self.USER_1_GAE_ID,
            email='some@email.com',
            role=feconf.ROLE_ID_COLLECTION_EDITOR
        ).put()
        user_models.UserSettingsModel(
            id='AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA',
            gae_id=self.USER_2_GAE_ID,
            email='some.different@email.com',
            role=feconf.ROLE_ID_COLLECTION_EDITOR
        ).put()
        output = self._run_one_off_job()
        # Keep only the detail list of the wrong-ID-format failure entry.
        output = [
            key[1] for key in output if key[0] == 'FAILURE - WRONG ID FORMAT'
        ][0]
        self.assertEqual(len(output), 2)
        self.assertIn(('gae_id_1', 'aa'), output)
        self.assertIn(('gae_id_2', 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'), output)
    def test_failure(self):
        """Models that still reference old GAE IDs are reported as
        (gae_id, model class name) tuples under
        'FAILURE - HAS REFERENCE TO GAE ID'.
        """
        user_models.CompletedActivitiesModel(
            id=self.USER_1_GAE_ID,
            exploration_ids=['1', '2'],
            collection_ids=['1', '2']
        ).put()
        user_models.CompletedActivitiesModel(
            id=self.USER_2_GAE_ID,
            exploration_ids=['1', '2'],
            collection_ids=['1', '2']
        ).put()
        user_models.ExpUserLastPlaythroughModel(
            id='%s.%s' % (self.USER_3_GAE_ID, 'exp_id'),
            user_id=self.USER_3_GAE_ID,
            exploration_id='exp_id',
            last_played_exp_version=2,
            last_played_state_name='start'
        ).put()
        # Model with DELETION_POLICY equal to NOT_APPLICABLE.
        activity_models.ActivityReferencesModel(id='some_id').put()
        output = self._run_one_off_job()
        self.assertNotIn('SUCCESS', [key[0] for key in output])
        output = [
            key[1] for key in output
            if key[0] == 'FAILURE - HAS REFERENCE TO GAE ID'][0]
        self.assertEqual(len(output), 3)
        self.assertIn((self.USER_1_GAE_ID, 'CompletedActivitiesModel'), output)
        self.assertIn((self.USER_2_GAE_ID, 'CompletedActivitiesModel'), output)
        self.assertIn(
            (self.USER_3_GAE_ID, 'ExpUserLastPlaythroughModel'), output)
    def test_success(self):
        """With only the three valid settings models from setUp present, the
        job reports success for all of them.
        """
        output = self._run_one_off_job()
        self.assertEqual(output, [['SUCCESS', 3]])
class BaseModelsUserIdsHaveUserSettingsVerificationJobTests(
        test_utils.GenericTestBase):
    """Tests for BaseModelsUserIdsHaveUserSettingsVerificationJob."""

    def test_entity_classes_to_map_over(self):
        """The base job must not implement entity_classes_to_map_over."""
        base_job_class = (
            user_id_migration.BaseModelsUserIdsHaveUserSettingsVerificationJob)
        self.assertRaises(
            NotImplementedError, base_job_class.entity_classes_to_map_over)
class MockGeneralFeedbackThreadUserModel(
        feedback_models.GeneralFeedbackThreadUserModel):
    """Test double for GeneralFeedbackThreadUserModel that relaxes user_id
    so that it may be left empty.
    """

    # Redeclare the property without the required constraint, keeping it
    # indexed so queries against it still work.
    user_id = ndb.StringProperty(indexed=True, required=False)
class ModelsUserIdsHaveUserSettingsVerificationJobTests(
        test_utils.GenericTestBase):
    """Tests for ModelsUserIdsHaveUserSettingsVerificationJob."""
    # NOTE: Unlike the other test classes in this file, these user IDs have
    # no 'uid_' prefix.
    USER_1_USER_ID = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
    USER_1_GAE_ID = 'gae_id_1'
    USER_2_USER_ID = 'bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb'
    USER_2_GAE_ID = 'gae_id_2'
    USER_3_USER_ID = 'cccccccccccccccccccccccccccccccc'
    USER_3_GAE_ID = 'gae_id_3'
    def _run_one_off_job(self):
        """Runs the one-off MapReduce job.

        Returns:
            list(list(*)). The evaluated output of the one-off job.
        """
        job_id = (
            user_id_migration.ModelsUserIdsHaveUserSettingsVerificationJob
            .create_new())
        (user_id_migration.ModelsUserIdsHaveUserSettingsVerificationJob
         .enqueue(job_id))
        self.assertEqual(
            self.count_jobs_in_taskqueue(
                taskqueue_services.QUEUE_NAME_ONE_OFF_JOBS), 1)
        self.process_and_flush_pending_tasks()
        stringified_output = (
            user_id_migration.ModelsUserIdsHaveUserSettingsVerificationJob
            .get_output(job_id))
        eval_output = [ast.literal_eval(stringified_item) for
                       stringified_item in stringified_output]
        return eval_output
    def setUp(self):
        def empty(*_):
            """Function that takes any number of arguments and does nothing."""
            pass
        # We don't want to signup the superadmin user.
        with self.swap(test_utils.TestBase, 'signup_superadmin_user', empty):
            super(
                ModelsUserIdsHaveUserSettingsVerificationJobTests, self).setUp()
        user_models.UserSettingsModel(
            id=self.USER_1_USER_ID,
            gae_id=self.USER_1_GAE_ID,
            email='some@email.com',
            role=feconf.ROLE_ID_COLLECTION_EDITOR
        ).put()
        user_models.UserSettingsModel(
            id=self.USER_2_USER_ID,
            gae_id=self.USER_2_GAE_ID,
            email='some.different@email.com',
            role=feconf.ROLE_ID_COLLECTION_EDITOR
        ).put()
        user_models.UserSettingsModel(
            id=self.USER_3_USER_ID,
            gae_id=self.USER_3_GAE_ID,
            email='some.different@email.cz',
            role=feconf.ROLE_ID_COLLECTION_EDITOR
        ).put()
    def test_one_user_one_model_full_id(self):
        """Models keyed by (or containing) old GAE IDs are reported as
        failures, while models using the new user IDs, the migration bot ID,
        or the system committer ID are reported as successes.
        """
        # Keyed by an old GAE ID -> expected FAILURE.
        user_models.CompletedActivitiesModel(
            id=self.USER_1_GAE_ID,
            exploration_ids=['1', '2'],
            collection_ids=['1', '2']).put()
        # Keyed by the migration bot ID -> expected SUCCESS.
        user_models.CompletedActivitiesModel(
            id=feconf.MIGRATION_BOT_USER_ID,
            exploration_ids=['1', '2'],
            collection_ids=['1', '2']).put()
        # ID and user_id use an old GAE ID -> expected FAILURE.
        user_models.ExpUserLastPlaythroughModel(
            id='%s.%s' % (self.USER_2_GAE_ID, 'exp_id'),
            user_id=self.USER_2_GAE_ID,
            exploration_id='exp_id',
            last_played_exp_version=2,
            last_played_state_name='start').put()
        # ID and user_id use the system committer ID -> expected SUCCESS.
        user_models.ExpUserLastPlaythroughModel(
            id='%s.%s' % (feconf.SYSTEM_COMMITTER_ID, 'exp_id'),
            user_id=feconf.SYSTEM_COMMITTER_ID,
            exploration_id='exp_id',
            last_played_exp_version=2,
            last_played_state_name='start').put()
        # The model ID embeds the old GAE ID even though user_id is already
        # migrated -> expected FAILURE (reported by model ID).
        user_models.UserContributionScoringModel(
            id='%s.%s' % ('category', self.USER_2_GAE_ID),
            user_id=self.USER_2_USER_ID,
            score_category='category',
            score=1.5,
            has_email_been_sent=False).put()
        # Model with no user ID set at all -> expected SUCCESS_NONE.
        feedback_models.GeneralFeedbackMessageModel(
            id='type.id.generated',
            thread_id='thread_id',
            message_id=2).put()
        # manager_ids contains an old GAE ID -> expected FAILURE.
        topic_models.TopicRightsModel.put_multi([
            topic_models.TopicRightsModel(
                id='topic_1_id',
                manager_ids=[self.USER_1_GAE_ID])])
        # manager_ids contains the system committer ID -> expected SUCCESS.
        topic_models.TopicRightsModel.put_multi([
            topic_models.TopicRightsModel(
                id='topic_2_id',
                manager_ids=[feconf.SYSTEM_COMMITTER_ID])])
        # Use the mock model class so that user_id may be left as None
        # (the production model requires it).
        with self.swap(
            feedback_models, 'GeneralFeedbackThreadUserModel',
            MockGeneralFeedbackThreadUserModel
        ):
            feedback_models.GeneralFeedbackThreadUserModel(
                id='%s.thread_id' % self.USER_1_USER_ID,
                user_id=self.USER_1_USER_ID,
                thread_id='thread_id').put()
            feedback_models.GeneralFeedbackThreadUserModel(
                id='%s.thread_id' % None,
                user_id=None,
                thread_id='thread_id').put()
        output = self._run_one_off_job()
        self.assertItemsEqual([
            ['SUCCESS - UserSettingsModel', 3],
            ['FAILURE - CompletedActivitiesModel', [self.USER_1_GAE_ID]],
            ['SUCCESS - CompletedActivitiesModel', 1],
            ['FAILURE - ExpUserLastPlaythroughModel',
             ['%s.%s' % (self.USER_2_GAE_ID, 'exp_id')]],
            ['SUCCESS - ExpUserLastPlaythroughModel', 1],
            ['FAILURE - UserContributionScoringModel',
             ['%s.%s' % ('category', self.USER_2_GAE_ID)]],
            ['SUCCESS_NONE - GeneralFeedbackMessageModel', 1],
            ['SUCCESS - MockGeneralFeedbackThreadUserModel', 1],
            ['FAILURE_NONE - MockGeneralFeedbackThreadUserModel',
             ['None.thread_id']],
            ['FAILURE - TopicRightsModel', ['topic_1_id']],
            ['SUCCESS - TopicRightsModel', 1],
            ], output)
class ModelsUserIdsHaveUserSettingsExplorationsVerificationJobTests(
        test_utils.GenericTestBase):
    """Tests for ModelsUserIdsHaveUserSettingsExplorationsVerificationJob."""
    # New-style 32-char user id and the old GAE id/username it replaces.
    USER_1_USER_ID = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
    USER_1_GAE_ID = 'gae_id_1'
    USER_1_USERNAME = 'username_1'
    def _run_one_off_job(self):
        """Runs the one-off MapReduce job."""
        job_id = (
            user_id_migration
            .ModelsUserIdsHaveUserSettingsExplorationsVerificationJob
            .create_new())
        (user_id_migration
            .ModelsUserIdsHaveUserSettingsExplorationsVerificationJob
            .enqueue(job_id))
        self.assertEqual(
            self.count_jobs_in_taskqueue(
                taskqueue_services.QUEUE_NAME_ONE_OFF_JOBS), 1)
        self.process_and_flush_pending_tasks()
        stringified_output = (
            user_id_migration
            .ModelsUserIdsHaveUserSettingsExplorationsVerificationJob
            .get_output(job_id))
        # The job emits stringified lists; parse them back into Python
        # values so the tests can assert on structured output.
        eval_output = [ast.literal_eval(stringified_item) for
                       stringified_item in stringified_output]
        return eval_output
    def setUp(self):
        """Creates a pre-migration UserSettingsModel for USER_1 while
        skipping the superadmin signup normally done by the base class.
        """
        def empty(*_):
            """Function that takes any number of arguments and does nothing."""
            pass
        # We don't want to signup the superadmin user.
        with self.swap(test_utils.TestBase, 'signup_superadmin_user', empty):
            super(
                ModelsUserIdsHaveUserSettingsExplorationsVerificationJobTests,
                self).setUp()
        user_models.UserSettingsModel(
            id=self.USER_1_USER_ID,
            gae_id=self.USER_1_GAE_ID,
            email='some@email.com',
            role=feconf.ROLE_ID_COLLECTION_EDITOR
        ).put()
    def test_one_user_one_model_full_id(self):
        """Models still keyed on the old GAE id are reported as FAILURE;
        models keyed on the migrated user id or on system committer ids
        are counted as SUCCESS.
        """
        # Still uses the old GAE id -- expected to be reported as FAILURE.
        exp_models.ExplorationCommitLogEntryModel(
            id='exp_1_id_1',
            user_id=self.USER_1_GAE_ID,
            username=self.USER_1_USERNAME,
            commit_type='create',
            commit_message='commit message 2',
            commit_cmds=[{'cmd': 'some_command'}],
            post_commit_status='private',
            post_commit_community_owned=False,
            post_commit_is_private=True,
            version=2,
            exploration_id='exp_1_id').put()
        # Uses the migrated user id -- expected SUCCESS.
        exp_models.ExplorationCommitLogEntryModel(
            id='exp_2_id_1',
            user_id=self.USER_1_USER_ID,
            username=self.USER_1_USERNAME,
            commit_type='create',
            commit_message='commit message 2',
            commit_cmds=[{'cmd': 'some_command'}],
            post_commit_status='private',
            post_commit_community_owned=False,
            post_commit_is_private=True,
            version=2,
            exploration_id='exp_2_id').put()
        # System committer ids are exempt from migration -- expected SUCCESS.
        exp_models.ExplorationCommitLogEntryModel(
            id='exp_3_id_1',
            user_id=feconf.SYSTEM_COMMITTER_ID,
            username=feconf.SYSTEM_COMMITTER_ID,
            commit_type='create',
            commit_message='commit message 2',
            commit_cmds=[{'cmd': 'some_command'}],
            post_commit_status='private',
            post_commit_community_owned=False,
            post_commit_is_private=True,
            version=2,
            exploration_id='exp_3_id').put()
        # Snapshot metadata with the old GAE id -- expected FAILURE.
        exp_models.ExplorationSnapshotMetadataModel(
            id='exp_1_id',
            committer_id=self.USER_1_GAE_ID,
            commit_type='create',
            commit_message='commit message 2',
            commit_cmds=[{'cmd': 'some_command'}]).put()
        exp_models.ExplorationSnapshotMetadataModel(
            id='exp_2_id',
            committer_id=self.USER_1_USER_ID,
            commit_type='create',
            commit_message='commit message 2',
            commit_cmds=[{'cmd': 'some_command'}]).put()
        exp_models.ExplorationSnapshotMetadataModel(
            id='exp_3_id',
            committer_id=feconf.SYSTEM_COMMITTER_ID,
            commit_type='create',
            commit_message='commit message 2',
            commit_cmds=[{'cmd': 'some_command'}]).put()
        exp_models.ExplorationSnapshotMetadataModel(
            id='exp_4_id',
            committer_id=feconf.SUGGESTION_BOT_USER_ID,
            commit_type='create',
            commit_message='commit message 2',
            commit_cmds=[{'cmd': 'some_command'}]).put()
        output = self._run_one_off_job()
        self.assertIn(
            ['FAILURE - ExplorationCommitLogEntryModel', ['exp_1_id_1']],
            output)
        self.assertIn(
            ['SUCCESS - ExplorationCommitLogEntryModel', 2], output)
        self.assertIn(
            ['FAILURE - ExplorationSnapshotMetadataModel', ['exp_1_id']],
            output)
        self.assertIn(
            ['SUCCESS - ExplorationSnapshotMetadataModel', 3], output)
class AddAllUserIdsVerificationJobTests(test_utils.GenericTestBase):
    """Tests for AddAllUserIdsVerificationJob. The job compares the user
    ids found on the rights models against the corresponding
    *RightsAllUsersModel entries and merges in any missing ids.
    """
    COL_1_ID = 'col_1_id'
    COL_2_ID = 'col_2_id'
    EXP_1_ID = 'exp_1_id'
    EXP_2_ID = 'exp_2_id'
    TOP_1_ID = 'top_1_id'
    TOP_2_ID = 'top_2_id'
    USER_1_ID = 'user_1_id'
    USER_2_ID = 'user_2_id'
    USER_3_ID = 'user_3_id'
    USER_4_ID = 'user_4_id'
    def _run_one_off_job(self):
        """Runs the one-off MapReduce job."""
        job_id = user_id_migration.AddAllUserIdsVerificationJob.create_new()
        user_id_migration.AddAllUserIdsVerificationJob.enqueue(job_id)
        self.assertEqual(
            self.count_jobs_in_taskqueue(
                taskqueue_services.QUEUE_NAME_ONE_OFF_JOBS), 1)
        self.process_and_flush_pending_tasks()
        stringified_output = (
            user_id_migration.AddAllUserIdsVerificationJob.get_output(job_id))
        # Parse the stringified job output back into Python values.
        eval_output = [ast.literal_eval(stringified_item) for
                       stringified_item in stringified_output]
        return eval_output
    def test_one_collection_rights_subset(self):
        """SUCCESS-SUBSET is reported when the rights model's user ids are
        a subset of the stored all_user_ids, which stay unchanged.
        """
        collection_models.CollectionRightsAllUsersModel(
            id=self.COL_1_ID,
            all_user_ids=[self.USER_1_ID, self.USER_2_ID, self.USER_3_ID]
        ).put()
        collection_models.CollectionRightsModel.put_multi([
            collection_models.CollectionRightsModel(
                id=self.COL_1_ID,
                owner_ids=[self.USER_1_ID, self.USER_2_ID],
                editor_ids=[self.USER_2_ID],
                voice_artist_ids=[self.USER_3_ID],
                viewer_ids=[],
                community_owned=False,
                status=constants.ACTIVITY_STATUS_PUBLIC,
                viewable_if_private=False,
                first_published_msec=0.0)])
        output = self._run_one_off_job()
        self.assertEqual(output, [['SUCCESS-SUBSET-CollectionRightsModel', 1]])
        self.assertItemsEqual(
            [self.USER_1_ID, self.USER_2_ID, self.USER_3_ID],
            collection_models.CollectionRightsAllUsersModel
            .get_by_id(self.COL_1_ID).all_user_ids)
    def test_one_exploration_rights_subset(self):
        """SUCCESS-SUBSET for an exploration whose rights user ids are
        already covered by the AllUsersModel entry.
        """
        exp_models.ExplorationRightsAllUsersModel(
            id=self.EXP_1_ID,
            all_user_ids=[self.USER_1_ID, self.USER_2_ID, self.USER_4_ID]
        ).put()
        exp_models.ExplorationRightsModel.put_multi([
            exp_models.ExplorationRightsModel(
                id=self.EXP_1_ID,
                owner_ids=[self.USER_1_ID, self.USER_2_ID],
                editor_ids=[self.USER_2_ID],
                voice_artist_ids=[],
                viewer_ids=[self.USER_4_ID],
                community_owned=False,
                status=constants.ACTIVITY_STATUS_PUBLIC,
                viewable_if_private=False,
                first_published_msec=0.0)])
        output = self._run_one_off_job()
        self.assertEqual(output, [['SUCCESS-SUBSET-ExplorationRightsModel', 1]])
        self.assertItemsEqual(
            [self.USER_1_ID, self.USER_2_ID, self.USER_4_ID],
            exp_models.ExplorationRightsAllUsersModel.get_by_id(self.EXP_1_ID)
            .all_user_ids)
    def test_one_topic_rights_subset(self):
        """SUCCESS-SUBSET for a topic whose manager ids are already
        covered by the AllUsersModel entry.
        """
        topic_models.TopicRightsAllUsersModel(
            id=self.TOP_1_ID,
            all_user_ids=[self.USER_1_ID, self.USER_2_ID]
        ).put()
        topic_models.TopicRightsModel.put_multi([
            topic_models.TopicRightsModel(
                id=self.TOP_1_ID,
                manager_ids=[self.USER_1_ID, self.USER_2_ID])])
        output = self._run_one_off_job()
        self.assertEqual(output, [['SUCCESS-SUBSET-TopicRightsModel', 1]])
        self.assertItemsEqual(
            [self.USER_1_ID, self.USER_2_ID],
            topic_models.TopicRightsAllUsersModel.get_by_id(self.TOP_1_ID)
            .all_user_ids)
    def test_one_collection_rights_not_subset(self):
        """SUCCESS-NOT_SUBSET is reported when the rights model contains
        user ids missing from all_user_ids; the missing ids get merged in.
        """
        collection_models.CollectionRightsAllUsersModel(
            id=self.COL_1_ID,
            all_user_ids=[self.USER_1_ID, self.USER_2_ID, self.USER_4_ID]
        ).put()
        collection_models.CollectionRightsModel.put_multi([
            collection_models.CollectionRightsModel(
                id=self.COL_1_ID,
                owner_ids=[self.USER_1_ID, self.USER_2_ID],
                editor_ids=[self.USER_2_ID],
                voice_artist_ids=[self.USER_3_ID],
                viewer_ids=[],
                community_owned=False,
                status=constants.ACTIVITY_STATUS_PUBLIC,
                viewable_if_private=False,
                first_published_msec=0.0)])
        output = self._run_one_off_job()
        self.assertEqual(
            output,
            [['SUCCESS-NOT_SUBSET-CollectionRightsModel', 1]])
        self.assertItemsEqual(
            [self.USER_1_ID, self.USER_2_ID, self.USER_3_ID, self.USER_4_ID],
            collection_models.CollectionRightsAllUsersModel
            .get_by_id(self.COL_1_ID).all_user_ids)
    def test_one_exploration_rights_not_subset(self):
        """SUCCESS-NOT_SUBSET for an exploration; USER_4 (viewer) is
        merged into all_user_ids.
        """
        exp_models.ExplorationRightsAllUsersModel(
            id=self.EXP_1_ID,
            all_user_ids=[self.USER_1_ID, self.USER_2_ID, self.USER_3_ID]
        ).put()
        exp_models.ExplorationRightsModel.put_multi([
            exp_models.ExplorationRightsModel(
                id=self.EXP_1_ID,
                owner_ids=[self.USER_1_ID, self.USER_2_ID],
                editor_ids=[self.USER_2_ID],
                voice_artist_ids=[],
                viewer_ids=[self.USER_4_ID],
                community_owned=False,
                status=constants.ACTIVITY_STATUS_PUBLIC,
                viewable_if_private=False,
                first_published_msec=0.0)])
        output = self._run_one_off_job()
        self.assertEqual(
            output, [['SUCCESS-NOT_SUBSET-ExplorationRightsModel', 1]])
        self.assertItemsEqual(
            [self.USER_1_ID, self.USER_2_ID, self.USER_3_ID, self.USER_4_ID],
            exp_models.ExplorationRightsAllUsersModel.get_by_id(self.EXP_1_ID)
            .all_user_ids)
    def test_one_topic_rights_not_subset(self):
        """SUCCESS-NOT_SUBSET for a topic; existing extra ids in
        all_user_ids are preserved.
        """
        topic_models.TopicRightsAllUsersModel(
            id=self.TOP_1_ID,
            all_user_ids=[self.USER_1_ID, self.USER_2_ID, self.USER_3_ID]
        ).put()
        topic_models.TopicRightsModel.put_multi([
            topic_models.TopicRightsModel(
                id=self.TOP_1_ID,
                manager_ids=[self.USER_1_ID, self.USER_2_ID])])
        output = self._run_one_off_job()
        self.assertEqual(output, [['SUCCESS-NOT_SUBSET-TopicRightsModel', 1]])
        self.assertItemsEqual(
            [self.USER_1_ID, self.USER_2_ID, self.USER_3_ID],
            topic_models.TopicRightsAllUsersModel.get_by_id(self.TOP_1_ID)
            .all_user_ids)
    def test_one_collection_rights_failure(self):
        """FAILURE is reported when no AllUsersModel exists for the
        collection rights model.
        """
        collection_models.CollectionRightsModel.put_multi([
            collection_models.CollectionRightsModel(
                id=self.COL_1_ID,
                owner_ids=[self.USER_1_ID, self.USER_2_ID],
                editor_ids=[self.USER_2_ID],
                voice_artist_ids=[self.USER_3_ID],
                viewer_ids=[],
                community_owned=False,
                status=constants.ACTIVITY_STATUS_PUBLIC,
                viewable_if_private=False,
                first_published_msec=0.0)])
        output = self._run_one_off_job()
        self.assertEqual(
            output, [['FAILURE-CollectionRightsModel', [self.COL_1_ID]]])
    def test_one_exploration_rights_failure(self):
        """FAILURE is reported when no AllUsersModel exists for the
        exploration rights model.
        """
        exp_models.ExplorationRightsModel.put_multi([
            exp_models.ExplorationRightsModel(
                id=self.EXP_1_ID,
                owner_ids=[self.USER_1_ID, self.USER_2_ID],
                editor_ids=[self.USER_2_ID],
                voice_artist_ids=[],
                viewer_ids=[self.USER_4_ID],
                community_owned=False,
                status=constants.ACTIVITY_STATUS_PUBLIC,
                viewable_if_private=False,
                first_published_msec=0.0)])
        output = self._run_one_off_job()
        self.assertEqual(
            output, [['FAILURE-ExplorationRightsModel', [self.EXP_1_ID]]])
    def test_one_topic_rights_failure(self):
        """FAILURE is reported when no AllUsersModel exists for the topic
        rights model.
        """
        topic_models.TopicRightsModel.put_multi([
            topic_models.TopicRightsModel(
                id=self.TOP_1_ID,
                manager_ids=[self.USER_1_ID, self.USER_2_ID])])
        output = self._run_one_off_job()
        self.assertEqual(
            output, [['FAILURE-TopicRightsModel', [self.TOP_1_ID]]])
    def test_multiple_rights(self):
        """Mixed scenario: subset and not-subset cases across collection,
        exploration and topic rights, all verified in one job run.
        """
        collection_models.CollectionRightsAllUsersModel(
            id=self.COL_1_ID,
            all_user_ids=[self.USER_1_ID]
        ).put()
        collection_models.CollectionRightsAllUsersModel(
            id=self.COL_2_ID,
            all_user_ids=[self.USER_1_ID, self.USER_2_ID, self.USER_3_ID]
        ).put()
        collection_models.CollectionRightsModel.put_multi([
            collection_models.CollectionRightsModel(
                id=self.COL_1_ID,
                owner_ids=[self.USER_1_ID],
                editor_ids=[],
                voice_artist_ids=[],
                viewer_ids=[],
                community_owned=False,
                status=constants.ACTIVITY_STATUS_PUBLIC,
                viewable_if_private=False,
                first_published_msec=0.0),
            collection_models.CollectionRightsModel(
                id=self.COL_2_ID,
                owner_ids=[self.USER_1_ID],
                editor_ids=[],
                voice_artist_ids=[],
                viewer_ids=[],
                community_owned=False,
                status=constants.ACTIVITY_STATUS_PUBLIC,
                viewable_if_private=False,
                first_published_msec=0.0)])
        exp_models.ExplorationRightsAllUsersModel(
            id=self.EXP_1_ID,
            all_user_ids=[self.USER_1_ID, self.USER_2_ID, self.USER_4_ID]
        ).put()
        exp_models.ExplorationRightsAllUsersModel(
            id=self.EXP_2_ID,
            all_user_ids=[self.USER_1_ID, self.USER_2_ID, self.USER_4_ID]
        ).put()
        exp_models.ExplorationRightsModel.put_multi([
            exp_models.ExplorationRightsModel(
                id=self.EXP_1_ID,
                owner_ids=[self.USER_1_ID],
                editor_ids=[self.USER_2_ID],
                voice_artist_ids=[self.USER_3_ID],
                viewer_ids=[self.USER_4_ID],
                community_owned=False,
                status=constants.ACTIVITY_STATUS_PUBLIC,
                viewable_if_private=False,
                first_published_msec=0.0),
            exp_models.ExplorationRightsModel(
                id=self.EXP_2_ID,
                owner_ids=[self.USER_1_ID],
                editor_ids=[],
                voice_artist_ids=[],
                viewer_ids=[],
                community_owned=False,
                status=constants.ACTIVITY_STATUS_PUBLIC,
                viewable_if_private=False,
                first_published_msec=0.0)])
        topic_models.TopicRightsAllUsersModel(
            id=self.TOP_1_ID,
            all_user_ids=[self.USER_3_ID, self.USER_4_ID]
        ).put()
        topic_models.TopicRightsAllUsersModel(
            id=self.TOP_2_ID,
            all_user_ids=[self.USER_1_ID, self.USER_2_ID]
        ).put()
        topic_models.TopicRightsModel.put_multi([
            topic_models.TopicRightsModel(
                id=self.TOP_1_ID,
                manager_ids=[self.USER_3_ID, self.USER_4_ID]),
            topic_models.TopicRightsModel(
                id=self.TOP_2_ID,
                manager_ids=[self.USER_1_ID, self.USER_2_ID])])
        output = self._run_one_off_job()
        self.assertIn(['SUCCESS-SUBSET-CollectionRightsModel', 1], output)
        self.assertIn(
            ['SUCCESS-NOT_SUBSET-CollectionRightsModel', 1], output)
        self.assertIn(['SUCCESS-SUBSET-ExplorationRightsModel', 1], output)
        self.assertIn(
            ['SUCCESS-NOT_SUBSET-ExplorationRightsModel', 1], output)
        self.assertIn(['SUCCESS-SUBSET-TopicRightsModel', 2], output)
        self.assertItemsEqual(
            [self.USER_1_ID],
            collection_models.CollectionRightsAllUsersModel
            .get_by_id(self.COL_1_ID).all_user_ids)
        self.assertItemsEqual(
            [self.USER_1_ID, self.USER_2_ID, self.USER_3_ID],
            collection_models.CollectionRightsAllUsersModel
            .get_by_id(self.COL_2_ID).all_user_ids)
        self.assertItemsEqual(
            [self.USER_1_ID, self.USER_2_ID, self.USER_3_ID, self.USER_4_ID],
            exp_models.ExplorationRightsAllUsersModel.get_by_id(self.EXP_1_ID)
            .all_user_ids)
        self.assertItemsEqual(
            [self.USER_3_ID, self.USER_4_ID],
            topic_models.TopicRightsAllUsersModel.get_by_id(self.TOP_1_ID)
            .all_user_ids)
        self.assertItemsEqual(
            [self.USER_1_ID, self.USER_2_ID],
            topic_models.TopicRightsAllUsersModel.get_by_id(self.TOP_2_ID)
            .all_user_ids)
class AddAllUserIdsSnapshotContentVerificationJobTests(
        test_utils.GenericTestBase):
    """Tests for AddAllUserIdsSnapshotContentVerificationJob. The job
    collects user ids from every rights snapshot content model and stores
    their union in the corresponding *RightsAllUsersModel.
    """
    COL_1_ID = 'col_1_id'
    EXP_1_ID = 'exp_1_id'
    TOP_1_ID = 'top_1_id'
    TOP_2_ID = 'top_2_id'
    USER_1_ID = 'user_1_id'
    USER_2_ID = 'user_2_id'
    USER_3_ID = 'user_3_id'
    USER_4_ID = 'user_4_id'
    def _run_one_off_job(self):
        """Runs the one-off MapReduce job."""
        job_id = (
            user_id_migration.AddAllUserIdsSnapshotContentVerificationJob
            .create_new())
        (user_id_migration.AddAllUserIdsSnapshotContentVerificationJob
            .enqueue(job_id))
        self.assertEqual(
            self.count_jobs_in_taskqueue(
                taskqueue_services.QUEUE_NAME_ONE_OFF_JOBS), 1)
        self.process_and_flush_pending_tasks()
        stringified_output = (
            user_id_migration.AddAllUserIdsSnapshotContentVerificationJob
            .get_output(job_id))
        eval_output = [ast.literal_eval(stringified_item) for
                       stringified_item in stringified_output]
        # Sort list values so assertions don't depend on output ordering.
        return [
            [key, sorted(values) if isinstance(values, list) else values]
            for key, values in eval_output]
    def test_one_collection_rights(self):
        """User ids from both collection rights snapshots (before and
        after the owner change) end up in all_user_ids.
        """
        collection_model = collection_models.CollectionRightsModel(
            id=self.COL_1_ID,
            owner_ids=[self.USER_1_ID],
            editor_ids=[self.USER_2_ID],
            voice_artist_ids=[],
            viewer_ids=[],
            community_owned=False,
            status=constants.ACTIVITY_STATUS_PUBLIC,
            viewable_if_private=False,
            first_published_msec=0.0
        )
        collection_model.save(
            'cid', 'Created new collection rights',
            [{'cmd': rights_manager.CMD_CREATE_NEW}])
        collection_model.owner_ids = [self.USER_3_ID]
        collection_model.save(
            'cid', 'Change owner',
            [{'cmd': rights_manager.CMD_CHANGE_ROLE}])
        output = self._run_one_off_job()
        self.assertEqual(
            output, [['SUCCESS-CollectionRightsSnapshotContentModel', 2]])
        self.assertItemsEqual(
            [self.USER_1_ID, self.USER_2_ID, self.USER_3_ID],
            collection_models.CollectionRightsAllUsersModel
            .get_by_id(self.COL_1_ID).all_user_ids)
    def test_one_exploration_rights(self):
        """User ids from both exploration rights snapshots end up in
        all_user_ids.
        """
        exp_model = exp_models.ExplorationRightsModel(
            id=self.EXP_1_ID,
            owner_ids=[self.USER_1_ID, self.USER_2_ID],
            editor_ids=[self.USER_2_ID],
            voice_artist_ids=[],
            viewer_ids=[self.USER_4_ID],
            community_owned=False,
            status=constants.ACTIVITY_STATUS_PUBLIC,
            viewable_if_private=False,
            first_published_msec=0.0)
        exp_model.save(
            'cid', 'Created new exploration rights',
            [{'cmd': rights_manager.CMD_CREATE_NEW}])
        exp_model.owner_ids = [self.USER_1_ID, self.USER_3_ID]
        exp_model.save(
            'cid', 'Change owner',
            [{'cmd': rights_manager.CMD_CHANGE_ROLE}])
        output = self._run_one_off_job()
        self.assertEqual(
            output, [['SUCCESS-ExplorationRightsSnapshotContentModel', 2]])
        self.assertItemsEqual(
            [self.USER_1_ID, self.USER_2_ID, self.USER_3_ID, self.USER_4_ID],
            exp_models.ExplorationRightsAllUsersModel.get_by_id(self.EXP_1_ID)
            .all_user_ids)
    def test_one_topic_rights(self):
        """Manager ids from both topic rights snapshots end up in
        all_user_ids.
        """
        topic_model = topic_models.TopicRightsModel(
            id=self.TOP_1_ID,
            manager_ids=[self.USER_1_ID, self.USER_2_ID])
        topic_model.commit(
            'cid', 'Created new topic rights',
            [{'cmd': rights_manager.CMD_CREATE_NEW}])
        topic_model.manager_ids = [self.USER_2_ID, self.USER_3_ID]
        topic_model.commit(
            'cid', 'Change manager',
            [{'cmd': rights_manager.CMD_CHANGE_ROLE}])
        output = self._run_one_off_job()
        self.assertEqual(
            output, [['SUCCESS-TopicRightsSnapshotContentModel', 2]])
        self.assertItemsEqual(
            [self.USER_1_ID, self.USER_2_ID, self.USER_3_ID],
            topic_models.TopicRightsAllUsersModel.get_by_id(self.TOP_1_ID)
            .all_user_ids)
    def test_multiple_rights(self):
        """Snapshots of collection, exploration and two topics are all
        aggregated in one job run.
        """
        collection_model = collection_models.CollectionRightsModel(
            id=self.COL_1_ID,
            owner_ids=[self.USER_1_ID],
            editor_ids=[],
            voice_artist_ids=[],
            viewer_ids=[],
            community_owned=False,
            status=constants.ACTIVITY_STATUS_PUBLIC,
            viewable_if_private=False,
            first_published_msec=0.0
        )
        collection_model.save(
            'cid', 'Created new collection rights',
            [{'cmd': rights_manager.CMD_CREATE_NEW}])
        collection_model.editor_ids = [self.USER_1_ID, self.USER_4_ID]
        collection_model.save(
            'cid', 'Add editors',
            [{'cmd': rights_manager.CMD_CHANGE_ROLE}])
        exp_model = exp_models.ExplorationRightsModel(
            id=self.EXP_1_ID,
            owner_ids=[self.USER_1_ID, self.USER_2_ID],
            editor_ids=[self.USER_2_ID],
            voice_artist_ids=[],
            viewer_ids=[self.USER_4_ID],
            community_owned=False,
            status=constants.ACTIVITY_STATUS_PUBLIC,
            viewable_if_private=False,
            first_published_msec=0.0)
        exp_model.save(
            'cid', 'Created new exploration rights',
            [{'cmd': rights_manager.CMD_CREATE_NEW}])
        exp_model.owner_ids = [self.USER_1_ID, self.USER_3_ID]
        exp_model.save(
            'cid', 'Change owner',
            [{'cmd': rights_manager.CMD_CHANGE_ROLE}])
        topic_model_1 = topic_models.TopicRightsModel(
            id=self.TOP_1_ID,
            manager_ids=[self.USER_1_ID, self.USER_2_ID])
        topic_model_1.commit(
            'cid', 'Created new topic rights',
            [{'cmd': rights_manager.CMD_CREATE_NEW}])
        topic_model_1.manager_ids = [self.USER_2_ID, self.USER_3_ID]
        topic_model_1.commit(
            'cid', 'Change manager',
            [{'cmd': rights_manager.CMD_CHANGE_ROLE}])
        topic_model_2 = topic_models.TopicRightsModel(
            id=self.TOP_2_ID,
            manager_ids=[self.USER_1_ID])
        topic_model_2.commit(
            'cid', 'Created new topic rights',
            [{'cmd': rights_manager.CMD_CREATE_NEW}])
        topic_model_2.manager_ids = [self.USER_4_ID]
        topic_model_2.commit(
            'cid', 'Change manager',
            [{'cmd': rights_manager.CMD_CHANGE_ROLE}])
        output = self._run_one_off_job()
        self.assertIn(
            ['SUCCESS-CollectionRightsSnapshotContentModel', 2], output)
        self.assertIn(
            ['SUCCESS-ExplorationRightsSnapshotContentModel', 2], output)
        self.assertIn(['SUCCESS-TopicRightsSnapshotContentModel', 4], output)
        self.assertItemsEqual(
            [self.USER_1_ID, self.USER_4_ID],
            collection_models.CollectionRightsAllUsersModel
            .get_by_id(self.COL_1_ID).all_user_ids)
        self.assertItemsEqual(
            [self.USER_1_ID, self.USER_2_ID, self.USER_3_ID, self.USER_4_ID],
            exp_models.ExplorationRightsAllUsersModel.get_by_id(self.EXP_1_ID)
            .all_user_ids)
        self.assertItemsEqual(
            [self.USER_1_ID, self.USER_2_ID, self.USER_3_ID],
            topic_models.TopicRightsAllUsersModel.get_by_id(self.TOP_1_ID)
            .all_user_ids)
        self.assertItemsEqual(
            [self.USER_1_ID, self.USER_4_ID],
            topic_models.TopicRightsAllUsersModel.get_by_id(self.TOP_2_ID)
            .all_user_ids)
class AddAllUserIdsSnapshotMetadataVerificationJobTests(
        test_utils.GenericTestBase):
    """Tests for AddAllUserIdsSnapshotMetadataVerificationJob. The job
    collects user ids referenced by the commit cmds of rights snapshot
    metadata models (e.g. assignee_id, removed_user_id) and stores them
    in the corresponding *RightsAllUsersModel.
    """
    COL_1_ID = 'col_1_id'
    EXP_1_ID = 'exp_1_id'
    TOP_1_ID = 'top_1_id'
    TOP_2_ID = 'top_2_id'
    USER_1_ID = 'user_1_id'
    USER_2_ID = 'user_2_id'
    USER_3_ID = 'user_3_id'
    USER_4_ID = 'user_4_id'
    def _run_one_off_job(self):
        """Runs the one-off MapReduce job."""
        job_id = (
            user_id_migration.AddAllUserIdsSnapshotMetadataVerificationJob
            .create_new())
        (user_id_migration.AddAllUserIdsSnapshotMetadataVerificationJob
            .enqueue(job_id))
        self.assertEqual(
            self.count_jobs_in_taskqueue(
                taskqueue_services.QUEUE_NAME_ONE_OFF_JOBS), 1)
        self.process_and_flush_pending_tasks()
        stringified_output = (
            user_id_migration.AddAllUserIdsSnapshotMetadataVerificationJob
            .get_output(job_id))
        eval_output = [ast.literal_eval(stringified_item) for
                       stringified_item in stringified_output]
        # Sort list values so assertions don't depend on output ordering.
        return [
            [key, sorted(values) if isinstance(values, list) else values]
            for key, values in eval_output]
    def test_one_collection_rights(self):
        """Only the assignee_id named in the commit cmds (USER_3) ends up
        in all_user_ids; users merely present on the model do not.
        """
        collection_model = collection_models.CollectionRightsModel(
            id=self.COL_1_ID,
            owner_ids=[self.USER_1_ID],
            editor_ids=[self.USER_2_ID],
            voice_artist_ids=[],
            viewer_ids=[],
            community_owned=False,
            status=constants.ACTIVITY_STATUS_PUBLIC,
            viewable_if_private=False,
            first_published_msec=0.0
        )
        collection_model.save(
            'cid',
            'Created new collection rights',
            [{'cmd': rights_manager.CMD_CREATE_NEW}])
        collection_model.owner_ids = [self.USER_3_ID]
        collection_model.save(
            'cid',
            'Change owner',
            [{
                'cmd': rights_manager.CMD_CHANGE_ROLE,
                'assignee_id': self.USER_3_ID,
                'old_role': rights_manager.ROLE_NONE,
                'new_role': rights_manager.ROLE_OWNER
            }])
        output = self._run_one_off_job()
        self.assertEqual(
            output, [['SUCCESS-CollectionRightsSnapshotMetadataModel', 2]])
        self.assertItemsEqual(
            [self.USER_3_ID],
            collection_models.CollectionRightsAllUsersModel
            .get_by_id(self.COL_1_ID).all_user_ids)
    def test_one_exploration_rights(self):
        """Only the assignee_id from the exploration commit cmds ends up
        in all_user_ids.
        """
        exp_model = exp_models.ExplorationRightsModel(
            id=self.EXP_1_ID,
            owner_ids=[self.USER_1_ID, self.USER_2_ID],
            editor_ids=[self.USER_2_ID],
            voice_artist_ids=[],
            viewer_ids=[],
            community_owned=False,
            status=constants.ACTIVITY_STATUS_PUBLIC,
            viewable_if_private=False,
            first_published_msec=0.0)
        exp_model.save(
            'cid', 'Created new exploration rights',
            [{'cmd': rights_manager.CMD_CREATE_NEW}])
        exp_model.owner_ids = [self.USER_3_ID]
        exp_model.save(
            'cid',
            'Change owner',
            [{
                'cmd': rights_manager.CMD_CHANGE_ROLE,
                'assignee_id': self.USER_3_ID,
                'old_role': rights_manager.ROLE_NONE,
                'new_role': rights_manager.ROLE_OWNER
            }])
        output = self._run_one_off_job()
        self.assertEqual(
            output, [['SUCCESS-ExplorationRightsSnapshotMetadataModel', 2]])
        self.assertItemsEqual(
            [self.USER_3_ID],
            exp_models.ExplorationRightsAllUsersModel.get_by_id(self.EXP_1_ID)
            .all_user_ids)
    def test_one_topic_rights_change_role(self):
        """Both the added manager (assignee_id) and the removed manager
        (removed_user_id) are collected from the topic commit cmds.
        """
        topic_model = topic_models.TopicRightsModel(
            id=self.TOP_1_ID,
            manager_ids=[self.USER_1_ID])
        topic_model.commit(
            'cid',
            'Created new topic rights',
            [{'cmd': rights_manager.CMD_CREATE_NEW}])
        topic_model.manager_ids = [self.USER_1_ID, self.USER_3_ID]
        topic_model.commit(
            'cid',
            'Add manager',
            [{
                'cmd': topic_domain.CMD_CHANGE_ROLE,
                'assignee_id': self.USER_3_ID,
                'old_role': topic_domain.ROLE_NONE,
                'new_role': topic_domain.ROLE_MANAGER
            }])
        topic_model.manager_ids = [self.USER_3_ID]
        topic_model.commit(
            'cid',
            'Remove manager',
            [{
                'cmd': topic_domain.CMD_REMOVE_MANAGER_ROLE,
                'removed_user_id': self.USER_1_ID,
            }])
        output = self._run_one_off_job()
        self.assertEqual(
            output, [['SUCCESS-TopicRightsSnapshotMetadataModel', 3]])
        self.assertItemsEqual(
            [self.USER_1_ID, self.USER_3_ID],
            topic_models.TopicRightsAllUsersModel.get_by_id(self.TOP_1_ID)
            .all_user_ids)
    def test_multiple_rights(self):
        """Commit-cmd user ids are collected across collection,
        exploration and two topics in one job run.
        """
        collection_model = collection_models.CollectionRightsModel(
            id=self.COL_1_ID,
            owner_ids=[],
            editor_ids=[self.USER_1_ID],
            voice_artist_ids=[],
            viewer_ids=[],
            community_owned=False,
            status=constants.ACTIVITY_STATUS_PUBLIC,
            viewable_if_private=False,
            first_published_msec=0.0
        )
        collection_model.save(
            'cid',
            'Created new collection rights',
            [{'cmd': rights_manager.CMD_CREATE_NEW}])
        collection_model.editor_ids = [self.USER_1_ID, self.USER_4_ID]
        collection_model.save(
            'cid',
            'Add editor',
            [{
                'cmd': rights_manager.CMD_CHANGE_ROLE,
                'assignee_id': self.USER_4_ID,
                'old_role': rights_manager.ROLE_NONE,
                'new_role': rights_manager.ROLE_EDITOR
            }])
        exp_model = exp_models.ExplorationRightsModel(
            id=self.EXP_1_ID,
            owner_ids=[self.USER_1_ID, self.USER_2_ID],
            editor_ids=[],
            voice_artist_ids=[],
            viewer_ids=[self.USER_4_ID],
            community_owned=False,
            status=constants.ACTIVITY_STATUS_PUBLIC,
            viewable_if_private=False,
            first_published_msec=0.0)
        exp_model.save(
            'cid', 'Created new exploration rights',
            [{'cmd': rights_manager.CMD_CREATE_NEW}])
        exp_model.owner_ids = [self.USER_1_ID, self.USER_2_ID, self.USER_3_ID]
        exp_model.save(
            'cid',
            'Add owner',
            [{
                'cmd': rights_manager.CMD_CHANGE_ROLE,
                'assignee_id': self.USER_3_ID,
                'old_role': rights_manager.ROLE_NONE,
                'new_role': rights_manager.ROLE_OWNER
            }])
        topic_model_1 = topic_models.TopicRightsModel(
            id=self.TOP_1_ID,
            manager_ids=[self.USER_1_ID, self.USER_2_ID])
        topic_model_1.commit(
            'cid',
            'Created new topic rights',
            [{'cmd': rights_manager.CMD_CREATE_NEW}])
        topic_model_1.manager_ids = [
            self.USER_1_ID, self.USER_2_ID, self.USER_3_ID]
        topic_model_1.commit(
            'cid',
            'Add manager',
            [{
                'cmd': topic_domain.CMD_CHANGE_ROLE,
                'assignee_id': self.USER_3_ID,
                'old_role': topic_domain.ROLE_NONE,
                'new_role': topic_domain.ROLE_MANAGER
            }])
        topic_model_2 = topic_models.TopicRightsModel(
            id=self.TOP_2_ID,
            manager_ids=[self.USER_1_ID, self.USER_4_ID])
        topic_model_2.commit(
            'cid', 'Created new topic rights',
            [{'cmd': rights_manager.CMD_CREATE_NEW}])
        topic_model_2.manager_ids = [self.USER_4_ID]
        topic_model_2.commit(
            'cid', 'Remove manager',
            [{
                'cmd': topic_domain.CMD_REMOVE_MANAGER_ROLE,
                'removed_user_id': self.USER_1_ID,
            }])
        output = self._run_one_off_job()
        self.assertIn(
            ['SUCCESS-CollectionRightsSnapshotMetadataModel', 2], output)
        self.assertIn(
            ['SUCCESS-ExplorationRightsSnapshotMetadataModel', 2], output)
        self.assertIn(['SUCCESS-TopicRightsSnapshotMetadataModel', 4], output)
        self.assertItemsEqual(
            [self.USER_4_ID],
            collection_models.CollectionRightsAllUsersModel
            .get_by_id(self.COL_1_ID).all_user_ids)
        self.assertItemsEqual(
            [self.USER_3_ID],
            exp_models.ExplorationRightsAllUsersModel.get_by_id(self.EXP_1_ID)
            .all_user_ids)
        self.assertItemsEqual(
            [self.USER_3_ID],
            topic_models.TopicRightsAllUsersModel.get_by_id(self.TOP_1_ID)
            .all_user_ids)
        self.assertItemsEqual(
            [self.USER_1_ID],
            topic_models.TopicRightsAllUsersModel.get_by_id(self.TOP_2_ID)
            .all_user_ids)
class DeleteAllUserIdsVerificationJobTests(test_utils.GenericTestBase):
    """Tests for DeleteAllUserIdsVerificationJob, which deletes the
    *RightsAllUsersModel instances that were only needed while the user
    id migration was being verified.
    """
    COL_ID = 'col_id'
    EXP_ID = 'exp_id'
    TOP_ID = 'top_id'
    def _run_one_off_job(self):
        """Runs the one-off MapReduce job.

        Returns:
            list. The job output, with each stringified item parsed back
            into a Python value via ast.literal_eval.
        """
        job_id = user_id_migration.DeleteAllUserIdsVerificationJob.create_new()
        user_id_migration.DeleteAllUserIdsVerificationJob.enqueue(job_id)
        self.assertEqual(
            self.count_jobs_in_taskqueue(
                taskqueue_services.QUEUE_NAME_ONE_OFF_JOBS), 1)
        self.process_and_flush_pending_tasks()
        stringified_output = (
            user_id_migration.DeleteAllUserIdsVerificationJob.get_output(
                job_id))
        eval_output = [ast.literal_eval(stringified_item) for
                       stringified_item in stringified_output]
        return eval_output
    def test_one_collection_rights(self):
        """A lone CollectionRightsAllUsersModel is deleted by the job."""
        collection_models.CollectionRightsAllUsersModel(
            id=self.COL_ID,
            all_user_ids=['some_id']
        ).put()
        output = self._run_one_off_job()
        self.assertEqual(output, [['SUCCESS', 1]])
        self.assertIsNone(
            collection_models.CollectionRightsAllUsersModel.get_by_id(
                self.COL_ID))
    def test_one_exploration_rights(self):
        """A lone ExplorationRightsAllUsersModel is deleted by the job."""
        exp_models.ExplorationRightsAllUsersModel(
            id=self.EXP_ID,
            all_user_ids=['some_id']
        ).put()
        output = self._run_one_off_job()
        self.assertEqual(output, [['SUCCESS', 1]])
        self.assertIsNone(
            exp_models.ExplorationRightsAllUsersModel.get_by_id(self.EXP_ID))
    def test_one_topic_rights(self):
        """A lone TopicRightsAllUsersModel is deleted by the job."""
        # Bug fix: this model was previously created with id=self.EXP_ID
        # while the assertion below checks TOP_ID, which made the
        # deletion assertion vacuously true. Use TOP_ID consistently.
        topic_models.TopicRightsAllUsersModel(
            id=self.TOP_ID,
            all_user_ids=['some_id']
        ).put()
        output = self._run_one_off_job()
        self.assertEqual(output, [['SUCCESS', 1]])
        self.assertIsNone(
            topic_models.TopicRightsAllUsersModel.get_by_id(self.TOP_ID))
    def test_multiple_rights(self):
        """All three AllUsersModel kinds are deleted in a single run."""
        collection_models.CollectionRightsAllUsersModel(
            id=self.COL_ID,
            all_user_ids=[]
        ).put()
        exp_models.ExplorationRightsAllUsersModel(
            id=self.EXP_ID,
            all_user_ids=['some_id']
        ).put()
        # Bug fix: use TOP_ID (previously EXP_ID) so the final assertion
        # actually verifies the created topic model was deleted.
        topic_models.TopicRightsAllUsersModel(
            id=self.TOP_ID,
            all_user_ids=['some_id', 'some_other_id']
        ).put()
        output = self._run_one_off_job()
        self.assertEqual(output, [['SUCCESS', 3]])
        self.assertIsNone(
            collection_models.CollectionRightsAllUsersModel.get_by_id(
                self.COL_ID))
        self.assertIsNone(
            exp_models.ExplorationRightsAllUsersModel.get_by_id(self.EXP_ID))
        self.assertIsNone(
            topic_models.TopicRightsAllUsersModel.get_by_id(self.TOP_ID))
| 40.883792
| 80
| 0.627619
| 16,672
| 147,059
| 5.085773
| 0.023213
| 0.065008
| 0.049416
| 0.016983
| 0.917832
| 0.900212
| 0.88159
| 0.866871
| 0.858545
| 0.842411
| 0
| 0.01401
| 0.281669
| 147,059
| 3,596
| 81
| 40.895161
| 0.788644
| 0.015579
| 0
| 0.848109
| 0
| 0
| 0.084072
| 0.030096
| 0
| 0
| 0
| 0
| 0.110043
| 1
| 0.031308
| false
| 0.00217
| 0.00434
| 0
| 0.069436
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6385c1bb58ed57658611b35c12f59ab31d397d43
| 58
|
py
|
Python
|
examples/modules/clustering.py
|
snehankekre/streamlit-yellowbrick
|
fd94bf4554966390ee578831612350d613aa3de7
|
[
"MIT"
] | 7
|
2021-06-08T10:24:19.000Z
|
2022-02-02T11:57:56.000Z
|
examples/modules/clustering.py
|
snehankekre/streamlit-yellowbrick
|
fd94bf4554966390ee578831612350d613aa3de7
|
[
"MIT"
] | null | null | null |
examples/modules/clustering.py
|
snehankekre/streamlit-yellowbrick
|
fd94bf4554966390ee578831612350d613aa3de7
|
[
"MIT"
] | null | null | null |
import streamlit as st
def run_clustering():
    """Placeholder entry point for the clustering examples module.

    Performs no work and returns None.
    """
    return None
| 9.666667
| 22
| 0.724138
| 8
| 58
| 5.125
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.224138
| 58
| 5
| 23
| 11.6
| 0.911111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
63aac52744e9eae6929b10da52bbfce5000667cf
| 4,961
|
py
|
Python
|
pyaz/monitor/log_analytics/workspace/saved_search/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/monitor/log_analytics/workspace/saved_search/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/monitor/log_analytics/workspace/saved_search/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | 1
|
2022-02-03T09:12:01.000Z
|
2022-02-03T09:12:01.000Z
|
from ..... pyaz_utils import _call_az
def create(category, display_name, name, resource_group, saved_query, workspace_name, func_alias=None, func_param=None, tags=None):
    """Create a saved search in a Log Analytics workspace.

    Args:
        category: Category of the saved search (helps users find it).
        display_name: Display name of the saved search.
        name: Name of the saved search; unique within the workspace.
        resource_group: Resource group name (a default can be set with
            `az configure --defaults group=<name>`).
        saved_query: The query expression for the saved search.
        workspace_name: Name of the Log Analytics workspace.
        func_alias: Optional short alias so the search can be referenced
            in queries; required for Computer Groups.
        func_param: Optional function parameters when the query serves as
            a function, e.g. 'p1:type1 = default1, p2:type2 = default2'.
            See https://docs.microsoft.com/azure/kusto/query/functions/user-defined-functions.
        tags: Space-separated tags: key[=value] [key[=value] ...]; use ''
            to clear existing tags.
    """
    # Snapshot the arguments before any other local is bound, so the dict
    # passed to _call_az contains exactly the CLI parameters.
    arguments = locals()
    return _call_az("az monitor log-analytics workspace saved-search create", arguments)
def update(name, resource_group, workspace_name, add=None, category=None, display_name=None, force_string=None, func_alias=None, func_param=None, remove=None, saved_query=None, set=None, tags=None):
    """Update a saved search in a Log Analytics workspace.

    Args:
        name: Name of the saved search; unique within the workspace.
        resource_group: Resource group name (a default can be set with
            `az configure --defaults group=<name>`).
        workspace_name: Name of the Log Analytics workspace.
        add: Add an object to a list property by path and key/value pairs,
            e.g. --add property.listProperty <key=value, string or JSON>.
        category: Category of the saved search (helps users find it).
        display_name: Display name of the saved search.
        force_string: With 'set'/'add', keep string literals instead of
            attempting JSON conversion.
        func_alias: Optional short alias so the search can be referenced
            in queries; required for Computer Groups.
        func_param: Optional function parameters when the query serves as
            a function, e.g. 'p1:type1 = default1, p2:type2 = default2'.
            See https://docs.microsoft.com/azure/kusto/query/functions/user-defined-functions.
        remove: Remove a property or list element, e.g.
            --remove property.list <indexToRemove> OR --remove prop.
        saved_query: The query expression for the saved search.
        set: Set a property by path and value, e.g.
            --set property1.property2=<value>.
        tags: Space-separated tags: key[=value] [key[=value] ...]; use ''
            to clear existing tags.
    """
    # Snapshot the arguments before any other local is bound, so the dict
    # passed to _call_az contains exactly the CLI parameters.
    arguments = locals()
    return _call_az("az monitor log-analytics workspace saved-search update", arguments)
def delete(name, resource_group, workspace_name, yes=None):
    '''
    Delete a saved search for a given workspace.

    Thin wrapper around `az monitor log-analytics workspace saved-search delete`;
    all bound arguments are forwarded to the CLI via locals().

    Required Parameters:
    - name -- Name of the saved search and it's unique in a given workspace.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - workspace_name -- Name of the Log Analytics Workspace.
    Optional Parameters:
    - yes -- Do not prompt for confirmation.
    Returns:
    - Whatever _call_az returns for this invocation -- presumably the parsed
      CLI output; _call_az is defined elsewhere in this module (verify there).
    '''
    # NOTE: locals() must be captured with only the arguments bound; adding any
    # local variable before this line would change the payload sent to the CLI.
    return _call_az("az monitor log-analytics workspace saved-search delete", locals())
def show(name, resource_group, workspace_name):
    '''
    Show a saved search for a given workspace.

    Thin wrapper around `az monitor log-analytics workspace saved-search show`;
    all bound arguments are forwarded to the CLI via locals().

    Required Parameters:
    - name -- Name of the saved search and it's unique in a given workspace.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - workspace_name -- Name of the Log Analytics Workspace.
    Returns:
    - Whatever _call_az returns for this invocation -- presumably the parsed
      CLI output; _call_az is defined elsewhere in this module (verify there).
    '''
    # NOTE: locals() must be captured with only the arguments bound; adding any
    # local variable before this line would change the payload sent to the CLI.
    return _call_az("az monitor log-analytics workspace saved-search show", locals())
def list(resource_group, workspace_name):
    '''
    List all saved searches for a given workspace.

    Thin wrapper around `az monitor log-analytics workspace saved-search list`;
    all bound arguments are forwarded to the CLI via locals(). The function name
    deliberately shadows the builtin `list` to mirror the CLI verb -- do not
    rename it, callers depend on it.

    Required Parameters:
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - workspace_name -- Name of the Log Analytics Workspace.
    Returns:
    - Whatever _call_az returns for this invocation -- presumably the parsed
      CLI output; _call_az is defined elsewhere in this module (verify there).
    '''
    # NOTE: locals() must be captured with only the arguments bound; adding any
    # local variable before this line would change the payload sent to the CLI.
    return _call_az("az monitor log-analytics workspace saved-search list", locals())
| 59.059524
| 329
| 0.729893
| 710
| 4,961
| 5.022535
| 0.192958
| 0.064778
| 0.027762
| 0.03281
| 0.78267
| 0.757431
| 0.732754
| 0.732754
| 0.732754
| 0.694055
| 0
| 0.003462
| 0.184842
| 4,961
| 83
| 330
| 59.771084
| 0.878338
| 0.753679
| 0
| 0
| 0
| 0
| 0.268959
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.454545
| false
| 0
| 0.090909
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
984282e356295258e0c8f638ac95381bf90d165f
| 3,627
|
py
|
Python
|
torpido/wavelet/wavelets/db12.py
|
AP-Atul/Torpido
|
a646b4d6de7f2e2c96de4c64ce3113f53e3931c2
|
[
"Unlicense"
] | 21
|
2020-12-23T07:13:10.000Z
|
2022-01-12T10:32:22.000Z
|
wavelet/wavelets/db12.py
|
AP-Atul/wavelets-ext
|
00ced22462c369584ebd32f9b5f357f092de0142
|
[
"MIT"
] | 2
|
2020-12-30T10:45:42.000Z
|
2021-09-25T09:52:00.000Z
|
wavelet/wavelets/db12.py
|
AP-Atul/wavelets-ext
|
00ced22462c369584ebd32f9b5f357f092de0142
|
[
"MIT"
] | 1
|
2021-02-06T21:39:41.000Z
|
2021-02-06T21:39:41.000Z
|
""" Daubechies 12 wavelet """
class Daubechies12:
"""
Properties
----------
asymmetric, orthogonal, bi-orthogonal
All values are from http://wavelets.pybytes.com/wavelet/db12/
"""
__name__ = "Daubechies Wavelet 12"
__motherWaveletLength__ = 24 # length of the mother wavelet
__transformWaveletLength__ = 2 # minimum wavelength of input signal
# decomposition filter
# low-pass
decompositionLowFilter = [
-1.5290717580684923e-06,
1.2776952219379579e-05,
- 2.4241545757030318e-05,
- 8.850410920820318e-05,
0.0003886530628209267,
6.5451282125215034e-06,
- 0.0021795036186277044,
0.0022486072409952287,
0.006711499008795549,
- 0.012840825198299882,
- 0.01221864906974642,
0.04154627749508764,
0.010849130255828966,
- 0.09643212009649671,
0.0053595696743599965,
0.18247860592758275,
- 0.023779257256064865,
- 0.31617845375277914,
- 0.04476388565377762,
0.5158864784278007,
0.6571987225792911,
0.3773551352142041,
0.10956627282118277,
0.013112257957229239
]
# high-pass
decompositionHighFilter = [
-0.013112257957229239,
0.10956627282118277,
- 0.3773551352142041,
0.6571987225792911,
- 0.5158864784278007,
- 0.04476388565377762,
0.31617845375277914,
- 0.023779257256064865,
- 0.18247860592758275,
0.0053595696743599965,
0.09643212009649671,
0.010849130255828966,
- 0.04154627749508764,
- 0.01221864906974642,
0.012840825198299882,
0.006711499008795549,
- 0.0022486072409952287,
- 0.0021795036186277044,
- 6.5451282125215034e-06,
0.0003886530628209267,
8.850410920820318e-05,
- 2.4241545757030318e-05,
- 1.2776952219379579e-05,
- 1.5290717580684923e-06
]
# reconstruction filters
# low pass
reconstructionLowFilter = [
0.013112257957229239,
0.10956627282118277,
0.3773551352142041,
0.6571987225792911,
0.5158864784278007,
- 0.04476388565377762,
- 0.31617845375277914,
- 0.023779257256064865,
0.18247860592758275,
0.0053595696743599965,
- 0.09643212009649671,
0.010849130255828966,
0.04154627749508764,
- 0.01221864906974642,
- 0.012840825198299882,
0.006711499008795549,
0.0022486072409952287,
- 0.0021795036186277044,
6.5451282125215034e-06,
0.0003886530628209267,
- 8.850410920820318e-05,
- 2.4241545757030318e-05,
1.2776952219379579e-05,
- 1.5290717580684923e-06
]
# high-pass
reconstructionHighFilter = [
-1.5290717580684923e-06,
- 1.2776952219379579e-05,
- 2.4241545757030318e-05,
8.850410920820318e-05,
0.0003886530628209267,
- 6.5451282125215034e-06,
- 0.0021795036186277044,
- 0.0022486072409952287,
0.006711499008795549,
0.012840825198299882,
- 0.01221864906974642,
- 0.04154627749508764,
0.010849130255828966,
0.09643212009649671,
0.0053595696743599965,
- 0.18247860592758275,
- 0.023779257256064865,
0.31617845375277914,
- 0.04476388565377762,
- 0.5158864784278007,
0.6571987225792911,
- 0.3773551352142041,
0.10956627282118277,
- 0.013112257957229239
]
| 28.116279
| 72
| 0.61373
| 266
| 3,627
| 8.323308
| 0.270677
| 0.03252
| 0.036134
| 0.039747
| 0.814815
| 0.814815
| 0.814815
| 0.814815
| 0.814815
| 0.814815
| 0
| 0.705582
| 0.298594
| 3,627
| 128
| 73
| 28.335938
| 0.164701
| 0.080507
| 0
| 0.851852
| 0
| 0
| 0.006375
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0.074074
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
98541c0770934db219be3afb2cc6d2f400ceea26
| 143,947
|
py
|
Python
|
release/scripts/addons/rigify/legacy/metarigs/pitchipoy_human.py
|
noorbeast/BlenderSource
|
65ebecc5108388965678b04b43463b85f6c69c1d
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | 3
|
2019-09-16T10:29:19.000Z
|
2022-02-11T14:43:18.000Z
|
engine/2.80/scripts/addons/rigify/legacy/metarigs/pitchipoy_human.py
|
byteinc/Phasor
|
f7d23a489c2b4bcc3c1961ac955926484ff8b8d9
|
[
"Unlicense"
] | null | null | null |
engine/2.80/scripts/addons/rigify/legacy/metarigs/pitchipoy_human.py
|
byteinc/Phasor
|
f7d23a489c2b4bcc3c1961ac955926484ff8b8d9
|
[
"Unlicense"
] | null | null | null |
import bpy
def create(obj):
# generated by rigify.utils.write_metarig
bpy.ops.object.mode_set(mode='EDIT')
arm = obj.data
for i in range(28):
arm.rigify_layers.add()
arm.rigify_layers[0].name = "Face"
arm.rigify_layers[0].row = 1
arm.rigify_layers[1].name = "Face (Primary)"
arm.rigify_layers[1].row = 2
arm.rigify_layers[2].name = "Face (Secondary)"
arm.rigify_layers[2].row = 2
arm.rigify_layers[3].name = "Torso"
arm.rigify_layers[3].row = 3
arm.rigify_layers[4].name = "Torso (Tweak)"
arm.rigify_layers[4].row = 4
arm.rigify_layers[5].name = "Fingers"
arm.rigify_layers[5].row = 5
arm.rigify_layers[6].name = "Fingers (Tweak)"
arm.rigify_layers[6].row = 6
arm.rigify_layers[7].name = "Arm.L (IK)"
arm.rigify_layers[7].row = 7
arm.rigify_layers[8].name = "Arm.L (FK)"
arm.rigify_layers[8].row = 8
arm.rigify_layers[9].name = "Arm.L (Tweak)"
arm.rigify_layers[9].row = 9
arm.rigify_layers[10].name = "Arm.R (IK)"
arm.rigify_layers[10].row = 7
arm.rigify_layers[11].name = "Arm.R (FK)"
arm.rigify_layers[11].row = 8
arm.rigify_layers[12].name = "Arm.R (Tweak)"
arm.rigify_layers[12].row = 9
arm.rigify_layers[13].name = "Leg.L (IK)"
arm.rigify_layers[13].row = 10
arm.rigify_layers[14].name = "Leg.L (FK)"
arm.rigify_layers[14].row = 11
arm.rigify_layers[15].name = "Leg.L (Tweak)"
arm.rigify_layers[15].row = 12
arm.rigify_layers[16].name = "Leg.R (IK)"
arm.rigify_layers[16].row = 10
arm.rigify_layers[17].name = "Leg.R (FK)"
arm.rigify_layers[17].row = 11
arm.rigify_layers[18].name = "Leg.R (Tweak)"
arm.rigify_layers[18].row = 12
arm.rigify_layers[19].name = ""
arm.rigify_layers[19].row = 1
arm.rigify_layers[20].name = ""
arm.rigify_layers[20].row = 1
arm.rigify_layers[21].name = ""
arm.rigify_layers[21].row = 1
arm.rigify_layers[22].name = ""
arm.rigify_layers[22].row = 1
arm.rigify_layers[23].name = ""
arm.rigify_layers[23].row = 1
arm.rigify_layers[24].name = ""
arm.rigify_layers[24].row = 1
arm.rigify_layers[25].name = ""
arm.rigify_layers[25].row = 1
arm.rigify_layers[26].name = ""
arm.rigify_layers[26].row = 1
arm.rigify_layers[27].name = ""
arm.rigify_layers[27].row = 1
bones = {}
bone = arm.edit_bones.new('spine')
bone.head[:] = 0.0000, 0.0552, 1.0099
bone.tail[:] = 0.0000, 0.0172, 1.1573
bone.roll = 0.0000
bone.use_connect = False
bones['spine'] = bone.name
bone = arm.edit_bones.new('spine.001')
bone.head[:] = 0.0000, 0.0172, 1.1573
bone.tail[:] = 0.0000, 0.0004, 1.2929
bone.roll = 0.0000
bone.use_connect = True
bone.parent = arm.edit_bones[bones['spine']]
bones['spine.001'] = bone.name
bone = arm.edit_bones.new('pelvis.L')
bone.head[:] = 0.0000, 0.0552, 1.0099
bone.tail[:] = 0.1112, -0.0451, 1.1533
bone.roll = -1.0756
bone.use_connect = False
bone.parent = arm.edit_bones[bones['spine']]
bones['pelvis.L'] = bone.name
bone = arm.edit_bones.new('pelvis.R')
bone.head[:] = -0.0000, 0.0552, 1.0099
bone.tail[:] = -0.1112, -0.0451, 1.1533
bone.roll = 1.0756
bone.use_connect = False
bone.parent = arm.edit_bones[bones['spine']]
bones['pelvis.R'] = bone.name
bone = arm.edit_bones.new('thigh.L')
bone.head[:] = 0.0980, 0.0124, 1.0720
bone.tail[:] = 0.0980, -0.0286, 0.5372
bone.roll = 0.0000
bone.use_connect = False
bone.parent = arm.edit_bones[bones['spine']]
bones['thigh.L'] = bone.name
bone = arm.edit_bones.new('thigh.R')
bone.head[:] = -0.0980, 0.0124, 1.0720
bone.tail[:] = -0.0980, -0.0286, 0.5372
bone.roll = 0.0000
bone.use_connect = False
bone.parent = arm.edit_bones[bones['spine']]
bones['thigh.R'] = bone.name
bone = arm.edit_bones.new('spine.002')
bone.head[:] = 0.0000, 0.0004, 1.2929
bone.tail[:] = 0.0000, 0.0059, 1.4657
bone.roll = 0.0000
bone.use_connect = True
bone.parent = arm.edit_bones[bones['spine.001']]
bones['spine.002'] = bone.name
bone = arm.edit_bones.new('shin.L')
bone.head[:] = 0.0980, -0.0286, 0.5372
bone.tail[:] = 0.0980, 0.0162, 0.0852
bone.roll = 0.0000
bone.use_connect = True
bone.parent = arm.edit_bones[bones['thigh.L']]
bones['shin.L'] = bone.name
bone = arm.edit_bones.new('shin.R')
bone.head[:] = -0.0980, -0.0286, 0.5372
bone.tail[:] = -0.0980, 0.0162, 0.0852
bone.roll = 0.0000
bone.use_connect = True
bone.parent = arm.edit_bones[bones['thigh.R']]
bones['shin.R'] = bone.name
bone = arm.edit_bones.new('spine.003')
bone.head[:] = 0.0000, 0.0059, 1.4657
bone.tail[:] = 0.0000, 0.0114, 1.6582
bone.roll = 0.0000
bone.use_connect = True
bone.parent = arm.edit_bones[bones['spine.002']]
bones['spine.003'] = bone.name
bone = arm.edit_bones.new('foot.L')
bone.head[:] = 0.0980, 0.0162, 0.0852
bone.tail[:] = 0.0980, -0.0934, 0.0167
bone.roll = 0.0000
bone.use_connect = True
bone.parent = arm.edit_bones[bones['shin.L']]
bones['foot.L'] = bone.name
bone = arm.edit_bones.new('foot.R')
bone.head[:] = -0.0980, 0.0162, 0.0852
bone.tail[:] = -0.0980, -0.0934, 0.0167
bone.roll = -0.0000
bone.use_connect = True
bone.parent = arm.edit_bones[bones['shin.R']]
bones['foot.R'] = bone.name
bone = arm.edit_bones.new('spine.004')
bone.head[:] = 0.0000, 0.0114, 1.6582
bone.tail[:] = 0.0000, -0.0067, 1.7197
bone.roll = 0.0000
bone.use_connect = True
bone.parent = arm.edit_bones[bones['spine.003']]
bones['spine.004'] = bone.name
bone = arm.edit_bones.new('shoulder.L')
bone.head[:] = 0.0183, -0.0684, 1.6051
bone.tail[:] = 0.1694, 0.0205, 1.6050
bone.roll = 0.0004
bone.use_connect = False
bone.parent = arm.edit_bones[bones['spine.003']]
bones['shoulder.L'] = bone.name
bone = arm.edit_bones.new('shoulder.R')
bone.head[:] = -0.0183, -0.0684, 1.6051
bone.tail[:] = -0.1694, 0.0205, 1.6050
bone.roll = -0.0004
bone.use_connect = False
bone.parent = arm.edit_bones[bones['spine.003']]
bones['shoulder.R'] = bone.name
bone = arm.edit_bones.new('breast.L')
bone.head[:] = 0.1184, 0.0485, 1.4596
bone.tail[:] = 0.1184, -0.0907, 1.4596
bone.roll = 0.0000
bone.use_connect = False
bone.parent = arm.edit_bones[bones['spine.003']]
bones['breast.L'] = bone.name
bone = arm.edit_bones.new('breast.R')
bone.head[:] = -0.1184, 0.0485, 1.4596
bone.tail[:] = -0.1184, -0.0907, 1.4596
bone.roll = -0.0000
bone.use_connect = False
bone.parent = arm.edit_bones[bones['spine.003']]
bones['breast.R'] = bone.name
bone = arm.edit_bones.new('toe.L')
bone.head[:] = 0.0980, -0.0934, 0.0167
bone.tail[:] = 0.0980, -0.1606, 0.0167
bone.roll = -0.0000
bone.use_connect = True
bone.parent = arm.edit_bones[bones['foot.L']]
bones['toe.L'] = bone.name
bone = arm.edit_bones.new('heel.02.L')
bone.head[:] = 0.0600, 0.0459, 0.0000
bone.tail[:] = 0.1400, 0.0459, 0.0000
bone.roll = 0.0000
bone.use_connect = False
bone.parent = arm.edit_bones[bones['foot.L']]
bones['heel.02.L'] = bone.name
bone = arm.edit_bones.new('toe.R')
bone.head[:] = -0.0980, -0.0934, 0.0167
bone.tail[:] = -0.0980, -0.1606, 0.0167
bone.roll = 0.0000
bone.use_connect = True
bone.parent = arm.edit_bones[bones['foot.R']]
bones['toe.R'] = bone.name
bone = arm.edit_bones.new('heel.02.R')
bone.head[:] = -0.0600, 0.0459, 0.0000
bone.tail[:] = -0.1400, 0.0459, 0.0000
bone.roll = -0.0000
bone.use_connect = False
bone.parent = arm.edit_bones[bones['foot.R']]
bones['heel.02.R'] = bone.name
bone = arm.edit_bones.new('spine.005')
bone.head[:] = 0.0000, -0.0067, 1.7197
bone.tail[:] = 0.0000, -0.0247, 1.7813
bone.roll = 0.0000
bone.use_connect = True
bone.parent = arm.edit_bones[bones['spine.004']]
bones['spine.005'] = bone.name
bone = arm.edit_bones.new('upper_arm.L')
bone.head[:] = 0.1953, 0.0267, 1.5846
bone.tail[:] = 0.4424, 0.0885, 1.4491
bone.roll = 2.0724
bone.use_connect = False
bone.parent = arm.edit_bones[bones['shoulder.L']]
bones['upper_arm.L'] = bone.name
bone = arm.edit_bones.new('upper_arm.R')
bone.head[:] = -0.1953, 0.0267, 1.5846
bone.tail[:] = -0.4424, 0.0885, 1.4491
bone.roll = -2.0724
bone.use_connect = False
bone.parent = arm.edit_bones[bones['shoulder.R']]
bones['upper_arm.R'] = bone.name
bone = arm.edit_bones.new('spine.006')
bone.head[:] = 0.0000, -0.0247, 1.7813
bone.tail[:] = 0.0000, -0.0247, 1.9796
bone.roll = 0.0000
bone.use_connect = True
bone.parent = arm.edit_bones[bones['spine.005']]
bones['spine.006'] = bone.name
bone = arm.edit_bones.new('forearm.L')
bone.head[:] = 0.4424, 0.0885, 1.4491
bone.tail[:] = 0.6594, 0.0492, 1.3061
bone.roll = 2.1535
bone.use_connect = True
bone.parent = arm.edit_bones[bones['upper_arm.L']]
bones['forearm.L'] = bone.name
bone = arm.edit_bones.new('forearm.R')
bone.head[:] = -0.4424, 0.0885, 1.4491
bone.tail[:] = -0.6594, 0.0492, 1.3061
bone.roll = -2.1535
bone.use_connect = True
bone.parent = arm.edit_bones[bones['upper_arm.R']]
bones['forearm.R'] = bone.name
bone = arm.edit_bones.new('face')
bone.head[:] = 0.0000, -0.0247, 1.7813
bone.tail[:] = 0.0000, -0.0247, 1.8725
bone.roll = 0.0000
bone.use_connect = False
bone.parent = arm.edit_bones[bones['spine.006']]
bones['face'] = bone.name
bone = arm.edit_bones.new('hand.L')
bone.head[:] = 0.6594, 0.0492, 1.3061
bone.tail[:] = 0.7234, 0.0412, 1.2585
bone.roll = 2.2103
bone.use_connect = True
bone.parent = arm.edit_bones[bones['forearm.L']]
bones['hand.L'] = bone.name
bone = arm.edit_bones.new('hand.R')
bone.head[:] = -0.6594, 0.0492, 1.3061
bone.tail[:] = -0.7234, 0.0412, 1.2585
bone.roll = -2.2103
bone.use_connect = True
bone.parent = arm.edit_bones[bones['forearm.R']]
bones['hand.R'] = bone.name
bone = arm.edit_bones.new('nose')
bone.head[:] = 0.0000, -0.1536, 1.8978
bone.tail[:] = 0.0000, -0.1834, 1.8589
bone.roll = 0.0000
bone.use_connect = False
bone.parent = arm.edit_bones[bones['face']]
bones['nose'] = bone.name
bone = arm.edit_bones.new('lip.T.L')
bone.head[:] = -0.0000, -0.1710, 1.8140
bone.tail[:] = 0.0195, -0.1656, 1.8146
bone.roll = 0.0000
bone.use_connect = False
bone.parent = arm.edit_bones[bones['face']]
bones['lip.T.L'] = bone.name
bone = arm.edit_bones.new('lip.B.L')
bone.head[:] = -0.0000, -0.1667, 1.7978
bone.tail[:] = 0.0185, -0.1585, 1.8028
bone.roll = -0.0789
bone.use_connect = False
bone.parent = arm.edit_bones[bones['face']]
bones['lip.B.L'] = bone.name
bone = arm.edit_bones.new('jaw')
bone.head[:] = 0.0000, -0.0945, 1.7439
bone.tail[:] = 0.0000, -0.1519, 1.7392
bone.roll = 0.0000
bone.use_connect = False
bone.parent = arm.edit_bones[bones['face']]
bones['jaw'] = bone.name
bone = arm.edit_bones.new('ear.L')
bone.head[:] = 0.0919, -0.0309, 1.8622
bone.tail[:] = 0.0989, -0.0336, 1.9017
bone.roll = -0.0324
bone.use_connect = False
bone.parent = arm.edit_bones[bones['face']]
bones['ear.L'] = bone.name
bone = arm.edit_bones.new('ear.R')
bone.head[:] = -0.0919, -0.0309, 1.8622
bone.tail[:] = -0.0989, -0.0336, 1.9017
bone.roll = 0.0324
bone.use_connect = False
bone.parent = arm.edit_bones[bones['face']]
bones['ear.R'] = bone.name
bone = arm.edit_bones.new('lip.T.R')
bone.head[:] = 0.0000, -0.1710, 1.8140
bone.tail[:] = -0.0195, -0.1656, 1.8146
bone.roll = -0.0000
bone.use_connect = False
bone.parent = arm.edit_bones[bones['face']]
bones['lip.T.R'] = bone.name
bone = arm.edit_bones.new('lip.B.R')
bone.head[:] = 0.0000, -0.1667, 1.7978
bone.tail[:] = -0.0185, -0.1585, 1.8028
bone.roll = 0.0789
bone.use_connect = False
bone.parent = arm.edit_bones[bones['face']]
bones['lip.B.R'] = bone.name
bone = arm.edit_bones.new('brow.B.L')
bone.head[:] = 0.0791, -0.1237, 1.9020
bone.tail[:] = 0.0704, -0.1349, 1.9078
bone.roll = 0.0412
bone.use_connect = False
bone.parent = arm.edit_bones[bones['face']]
bones['brow.B.L'] = bone.name
bone = arm.edit_bones.new('lid.T.L')
bone.head[:] = 0.0768, -0.1218, 1.8947
bone.tail[:] = 0.0678, -0.1356, 1.8995
bone.roll = -0.2079
bone.use_connect = False
bone.parent = arm.edit_bones[bones['face']]
bones['lid.T.L'] = bone.name
bone = arm.edit_bones.new('brow.B.R')
bone.head[:] = -0.0791, -0.1237, 1.9020
bone.tail[:] = -0.0704, -0.1349, 1.9078
bone.roll = -0.0412
bone.use_connect = False
bone.parent = arm.edit_bones[bones['face']]
bones['brow.B.R'] = bone.name
bone = arm.edit_bones.new('lid.T.R')
bone.head[:] = -0.0768, -0.1218, 1.8947
bone.tail[:] = -0.0678, -0.1356, 1.8995
bone.roll = 0.2079
bone.use_connect = False
bone.parent = arm.edit_bones[bones['face']]
bones['lid.T.R'] = bone.name
bone = arm.edit_bones.new('forehead.L')
bone.head[:] = 0.0168, -0.1325, 1.9704
bone.tail[:] = 0.0215, -0.1546, 1.9144
bone.roll = 1.4313
bone.use_connect = False
bone.parent = arm.edit_bones[bones['face']]
bones['forehead.L'] = bone.name
bone = arm.edit_bones.new('forehead.R')
bone.head[:] = -0.0168, -0.1325, 1.9704
bone.tail[:] = -0.0215, -0.1546, 1.9144
bone.roll = -1.4313
bone.use_connect = False
bone.parent = arm.edit_bones[bones['face']]
bones['forehead.R'] = bone.name
bone = arm.edit_bones.new('eye.L')
bone.head[:] = 0.0516, -0.1209, 1.8941
bone.tail[:] = 0.0516, -0.1451, 1.8941
bone.roll = 0.0000
bone.use_connect = False
bone.parent = arm.edit_bones[bones['face']]
bones['eye.L'] = bone.name
bone = arm.edit_bones.new('eye.R')
bone.head[:] = -0.0516, -0.1209, 1.8941
bone.tail[:] = -0.0516, -0.1451, 1.8941
bone.roll = -0.0000
bone.use_connect = False
bone.parent = arm.edit_bones[bones['face']]
bones['eye.R'] = bone.name
bone = arm.edit_bones.new('cheek.T.L')
bone.head[:] = 0.0848, -0.0940, 1.8870
bone.tail[:] = 0.0565, -0.1430, 1.8517
bone.roll = -0.0096
bone.use_connect = False
bone.parent = arm.edit_bones[bones['face']]
bones['cheek.T.L'] = bone.name
bone = arm.edit_bones.new('cheek.T.R')
bone.head[:] = -0.0848, -0.0940, 1.8870
bone.tail[:] = -0.0565, -0.1430, 1.8517
bone.roll = 0.0096
bone.use_connect = False
bone.parent = arm.edit_bones[bones['face']]
bones['cheek.T.R'] = bone.name
bone = arm.edit_bones.new('teeth.T')
bone.head[:] = 0.0000, -0.1568, 1.8214
bone.tail[:] = 0.0000, -0.1112, 1.8214
bone.roll = 0.0000
bone.use_connect = False
bone.parent = arm.edit_bones[bones['face']]
bones['teeth.T'] = bone.name
bone = arm.edit_bones.new('teeth.B')
bone.head[:] = 0.0000, -0.1500, 1.7892
bone.tail[:] = 0.0000, -0.1043, 1.7892
bone.roll = 0.0000
bone.use_connect = False
bone.parent = arm.edit_bones[bones['face']]
bones['teeth.B'] = bone.name
bone = arm.edit_bones.new('tongue')
bone.head[:] = 0.0000, -0.1354, 1.7946
bone.tail[:] = 0.0000, -0.1101, 1.8002
bone.roll = 0.0000
bone.use_connect = False
bone.parent = arm.edit_bones[bones['face']]
bones['tongue'] = bone.name
bone = arm.edit_bones.new('palm.01.L')
bone.head[:] = 0.6921, 0.0224, 1.2882
bone.tail[:] = 0.7464, 0.0051, 1.2482
bone.roll = -2.4928
bone.use_connect = False
bone.parent = arm.edit_bones[bones['hand.L']]
bones['palm.01.L'] = bone.name
bone = arm.edit_bones.new('palm.02.L')
bone.head[:] = 0.6970, 0.0389, 1.2877
bone.tail[:] = 0.7518, 0.0277, 1.2487
bone.roll = -2.5274
bone.use_connect = False
bone.parent = arm.edit_bones[bones['hand.L']]
bones['palm.02.L'] = bone.name
bone = arm.edit_bones.new('palm.03.L')
bone.head[:] = 0.6963, 0.0545, 1.2874
bone.tail[:] = 0.7540, 0.0521, 1.2482
bone.roll = -2.5843
bone.use_connect = False
bone.parent = arm.edit_bones[bones['hand.L']]
bones['palm.03.L'] = bone.name
bone = arm.edit_bones.new('palm.04.L')
bone.head[:] = 0.6929, 0.0696, 1.2871
bone.tail[:] = 0.7528, 0.0763, 1.2428
bone.roll = -2.5155
bone.use_connect = False
bone.parent = arm.edit_bones[bones['hand.L']]
bones['palm.04.L'] = bone.name
bone = arm.edit_bones.new('palm.01.R')
bone.head[:] = -0.6921, 0.0224, 1.2882
bone.tail[:] = -0.7464, 0.0051, 1.2482
bone.roll = 2.4928
bone.use_connect = False
bone.parent = arm.edit_bones[bones['hand.R']]
bones['palm.01.R'] = bone.name
bone = arm.edit_bones.new('palm.02.R')
bone.head[:] = -0.6970, 0.0389, 1.2877
bone.tail[:] = -0.7518, 0.0277, 1.2487
bone.roll = 2.5274
bone.use_connect = False
bone.parent = arm.edit_bones[bones['hand.R']]
bones['palm.02.R'] = bone.name
bone = arm.edit_bones.new('palm.03.R')
bone.head[:] = -0.6963, 0.0544, 1.2874
bone.tail[:] = -0.7540, 0.0521, 1.2482
bone.roll = 2.5843
bone.use_connect = False
bone.parent = arm.edit_bones[bones['hand.R']]
bones['palm.03.R'] = bone.name
bone = arm.edit_bones.new('palm.04.R')
bone.head[:] = -0.6929, 0.0696, 1.2871
bone.tail[:] = -0.7528, 0.0763, 1.2428
bone.roll = 2.5155
bone.use_connect = False
bone.parent = arm.edit_bones[bones['hand.R']]
bones['palm.04.R'] = bone.name
bone = arm.edit_bones.new('nose.001')
bone.head[:] = 0.0000, -0.1834, 1.8589
bone.tail[:] = 0.0000, -0.1965, 1.8450
bone.roll = 0.0000
bone.use_connect = True
bone.parent = arm.edit_bones[bones['nose']]
bones['nose.001'] = bone.name
bone = arm.edit_bones.new('lip.T.L.001')
bone.head[:] = 0.0195, -0.1656, 1.8146
bone.tail[:] = 0.0352, -0.1494, 1.8074
bone.roll = 0.0236
bone.use_connect = True
bone.parent = arm.edit_bones[bones['lip.T.L']]
bones['lip.T.L.001'] = bone.name
bone = arm.edit_bones.new('lip.B.L.001')
bone.head[:] = 0.0185, -0.1585, 1.8028
bone.tail[:] = 0.0352, -0.1494, 1.8074
bone.roll = 0.0731
bone.use_connect = True
bone.parent = arm.edit_bones[bones['lip.B.L']]
bones['lip.B.L.001'] = bone.name
bone = arm.edit_bones.new('chin')
bone.head[:] = 0.0000, -0.1519, 1.7392
bone.tail[:] = 0.0000, -0.1634, 1.7692
bone.roll = 0.0000
bone.use_connect = True
bone.parent = arm.edit_bones[bones['jaw']]
bones['chin'] = bone.name
bone = arm.edit_bones.new('ear.L.001')
bone.head[:] = 0.0989, -0.0336, 1.9017
bone.tail[:] = 0.1200, -0.0088, 1.9074
bone.roll = 0.0656
bone.use_connect = True
bone.parent = arm.edit_bones[bones['ear.L']]
bones['ear.L.001'] = bone.name
bone = arm.edit_bones.new('ear.R.001')
bone.head[:] = -0.0989, -0.0336, 1.9017
bone.tail[:] = -0.1200, -0.0088, 1.9074
bone.roll = -0.0656
bone.use_connect = True
bone.parent = arm.edit_bones[bones['ear.R']]
bones['ear.R.001'] = bone.name
bone = arm.edit_bones.new('lip.T.R.001')
bone.head[:] = -0.0195, -0.1656, 1.8146
bone.tail[:] = -0.0352, -0.1494, 1.8074
bone.roll = -0.0236
bone.use_connect = True
bone.parent = arm.edit_bones[bones['lip.T.R']]
bones['lip.T.R.001'] = bone.name
bone = arm.edit_bones.new('lip.B.R.001')
bone.head[:] = -0.0185, -0.1585, 1.8028
bone.tail[:] = -0.0352, -0.1494, 1.8074
bone.roll = -0.0731
bone.use_connect = True
bone.parent = arm.edit_bones[bones['lip.B.R']]
bones['lip.B.R.001'] = bone.name
bone = arm.edit_bones.new('brow.B.L.001')
bone.head[:] = 0.0704, -0.1349, 1.9078
bone.tail[:] = 0.0577, -0.1427, 1.9093
bone.roll = 0.0192
bone.use_connect = True
bone.parent = arm.edit_bones[bones['brow.B.L']]
bones['brow.B.L.001'] = bone.name
bone = arm.edit_bones.new('lid.T.L.001')
bone.head[:] = 0.0678, -0.1356, 1.8995
bone.tail[:] = 0.0550, -0.1436, 1.9022
bone.roll = 0.1837
bone.use_connect = True
bone.parent = arm.edit_bones[bones['lid.T.L']]
bones['lid.T.L.001'] = bone.name
bone = arm.edit_bones.new('brow.B.R.001')
bone.head[:] = -0.0704, -0.1349, 1.9078
bone.tail[:] = -0.0577, -0.1427, 1.9093
bone.roll = -0.0192
bone.use_connect = True
bone.parent = arm.edit_bones[bones['brow.B.R']]
bones['brow.B.R.001'] = bone.name
bone = arm.edit_bones.new('lid.T.R.001')
bone.head[:] = -0.0678, -0.1356, 1.8995
bone.tail[:] = -0.0550, -0.1436, 1.9022
bone.roll = -0.1837
bone.use_connect = True
bone.parent = arm.edit_bones[bones['lid.T.R']]
bones['lid.T.R.001'] = bone.name
bone = arm.edit_bones.new('forehead.L.001')
bone.head[:] = 0.0479, -0.1174, 1.9756
bone.tail[:] = 0.0588, -0.1421, 1.9255
bone.roll = 0.9928
bone.use_connect = False
bone.parent = arm.edit_bones[bones['forehead.L']]
bones['forehead.L.001'] = bone.name
bone = arm.edit_bones.new('forehead.R.001')
bone.head[:] = -0.0479, -0.1174, 1.9756
bone.tail[:] = -0.0588, -0.1421, 1.9255
bone.roll = -0.9928
bone.use_connect = False
bone.parent = arm.edit_bones[bones['forehead.R']]
bones['forehead.R.001'] = bone.name
bone = arm.edit_bones.new('cheek.T.L.001')
bone.head[:] = 0.0565, -0.1430, 1.8517
bone.tail[:] = 0.0188, -0.1448, 1.8822
bone.roll = 0.1387
bone.use_connect = True
bone.parent = arm.edit_bones[bones['cheek.T.L']]
bones['cheek.T.L.001'] = bone.name
bone = arm.edit_bones.new('cheek.T.R.001')
bone.head[:] = -0.0565, -0.1430, 1.8517
bone.tail[:] = -0.0188, -0.1448, 1.8822
bone.roll = -0.1387
bone.use_connect = True
bone.parent = arm.edit_bones[bones['cheek.T.R']]
bones['cheek.T.R.001'] = bone.name
bone = arm.edit_bones.new('tongue.001')
bone.head[:] = 0.0000, -0.1101, 1.8002
bone.tail[:] = 0.0000, -0.0761, 1.7949
bone.roll = 0.0000
bone.use_connect = True
bone.parent = arm.edit_bones[bones['tongue']]
bones['tongue.001'] = bone.name
bone = arm.edit_bones.new('f_index.01.L')
bone.head[:] = 0.7464, 0.0051, 1.2482
bone.tail[:] = 0.7718, 0.0013, 1.2112
bone.roll = -2.0315
bone.use_connect = False
bone.parent = arm.edit_bones[bones['palm.01.L']]
bones['f_index.01.L'] = bone.name
bone = arm.edit_bones.new('thumb.01.L')
bone.head[:] = 0.6705, 0.0214, 1.2738
bone.tail[:] = 0.6857, 0.0015, 1.2404
bone.roll = -0.1587
bone.use_connect = False
bone.parent = arm.edit_bones[bones['palm.01.L']]
bones['thumb.01.L'] = bone.name
bone = arm.edit_bones.new('f_middle.01.L')
bone.head[:] = 0.7518, 0.0277, 1.2487
bone.tail[:] = 0.7762, 0.0234, 1.2058
bone.roll = -2.0067
bone.use_connect = False
bone.parent = arm.edit_bones[bones['palm.02.L']]
bones['f_middle.01.L'] = bone.name
bone = arm.edit_bones.new('f_ring.01.L')
bone.head[:] = 0.7540, 0.0521, 1.2482
bone.tail[:] = 0.7715, 0.0499, 1.2070
bone.roll = -2.0082
bone.use_connect = False
bone.parent = arm.edit_bones[bones['palm.03.L']]
bones['f_ring.01.L'] = bone.name
bone = arm.edit_bones.new('f_pinky.01.L')
bone.head[:] = 0.7528, 0.0763, 1.2428
bone.tail[:] = 0.7589, 0.0765, 1.2156
bone.roll = -1.9749
bone.use_connect = False
bone.parent = arm.edit_bones[bones['palm.04.L']]
bones['f_pinky.01.L'] = bone.name
bone = arm.edit_bones.new('f_index.01.R')
bone.head[:] = -0.7464, 0.0051, 1.2482
bone.tail[:] = -0.7718, 0.0012, 1.2112
bone.roll = 2.0315
bone.use_connect = False
bone.parent = arm.edit_bones[bones['palm.01.R']]
bones['f_index.01.R'] = bone.name
bone = arm.edit_bones.new('thumb.01.R')
bone.head[:] = -0.6705, 0.0214, 1.2738
bone.tail[:] = -0.6857, 0.0015, 1.2404
bone.roll = 0.1587
bone.use_connect = False
bone.parent = arm.edit_bones[bones['palm.01.R']]
bones['thumb.01.R'] = bone.name
bone = arm.edit_bones.new('f_middle.01.R')
bone.head[:] = -0.7518, 0.0277, 1.2487
bone.tail[:] = -0.7762, 0.0233, 1.2058
bone.roll = 2.0067
bone.use_connect = False
bone.parent = arm.edit_bones[bones['palm.02.R']]
bones['f_middle.01.R'] = bone.name
bone = arm.edit_bones.new('f_ring.01.R')
bone.head[:] = -0.7540, 0.0521, 1.2482
bone.tail[:] = -0.7715, 0.0499, 1.2070
bone.roll = 2.0082
bone.use_connect = False
bone.parent = arm.edit_bones[bones['palm.03.R']]
bones['f_ring.01.R'] = bone.name
bone = arm.edit_bones.new('f_pinky.01.R')
bone.head[:] = -0.7528, 0.0763, 1.2428
bone.tail[:] = -0.7589, 0.0765, 1.2156
bone.roll = 1.9749
bone.use_connect = False
bone.parent = arm.edit_bones[bones['palm.04.R']]
bones['f_pinky.01.R'] = bone.name
bone = arm.edit_bones.new('nose.002')
bone.head[:] = 0.0000, -0.1965, 1.8450
bone.tail[:] = 0.0000, -0.1854, 1.8402
bone.roll = 0.0000
bone.use_connect = True
bone.parent = arm.edit_bones[bones['nose.001']]
bones['nose.002'] = bone.name
bone = arm.edit_bones.new('chin.001')
bone.head[:] = 0.0000, -0.1634, 1.7692
bone.tail[:] = 0.0000, -0.1599, 1.7909
bone.roll = 0.0000
bone.use_connect = True
bone.parent = arm.edit_bones[bones['chin']]
bones['chin.001'] = bone.name
bone = arm.edit_bones.new('ear.L.002')
bone.head[:] = 0.1200, -0.0088, 1.9074
bone.tail[:] = 0.1206, -0.0101, 1.8695
bone.roll = -0.0265
bone.use_connect = True
bone.parent = arm.edit_bones[bones['ear.L.001']]
bones['ear.L.002'] = bone.name
bone = arm.edit_bones.new('ear.R.002')
bone.head[:] = -0.1200, -0.0088, 1.9074
bone.tail[:] = -0.1206, -0.0101, 1.8695
bone.roll = 0.0265
bone.use_connect = True
bone.parent = arm.edit_bones[bones['ear.R.001']]
bones['ear.R.002'] = bone.name
bone = arm.edit_bones.new('brow.B.L.002')
bone.head[:] = 0.0577, -0.1427, 1.9093
bone.tail[:] = 0.0388, -0.1418, 1.9069
bone.roll = 0.0847
bone.use_connect = True
bone.parent = arm.edit_bones[bones['brow.B.L.001']]
bones['brow.B.L.002'] = bone.name
# Remaining face and finger edit bones, one row per bone:
#   (name, head, tail, roll, use_connect, parent key)
# Row order matters: every parent must already be present in `bones` when its
# child row is processed, so the table keeps the original creation order.
_bone_rows = [
    ('lid.T.L.002', (0.0550, -0.1436, 1.9022), (0.0425, -0.1427, 1.8987), -0.0940, True, 'lid.T.L.001'),
    ('brow.B.R.002', (-0.0577, -0.1427, 1.9093), (-0.0388, -0.1418, 1.9069), -0.0847, True, 'brow.B.R.001'),
    ('lid.T.R.002', (-0.0550, -0.1436, 1.9022), (-0.0425, -0.1427, 1.8987), 0.0940, True, 'lid.T.R.001'),
    ('forehead.L.002', (0.0719, -0.0940, 1.9717), (0.0830, -0.1213, 1.9164), 0.4509, False, 'forehead.L.001'),
    ('forehead.R.002', (-0.0719, -0.0940, 1.9717), (-0.0830, -0.1213, 1.9164), -0.4509, False, 'forehead.R.001'),
    ('nose.L', (0.0188, -0.1448, 1.8822), (0.0176, -0.1627, 1.8429), 0.0997, True, 'cheek.T.L.001'),
    ('nose.R', (-0.0188, -0.1448, 1.8822), (-0.0176, -0.1627, 1.8429), -0.0997, True, 'cheek.T.R.001'),
    ('tongue.002', (0.0000, -0.0761, 1.7949), (0.0000, -0.0538, 1.7673), 0.0000, True, 'tongue.001'),
    ('f_index.02.L', (0.7718, 0.0013, 1.2112), (0.7840, -0.0003, 1.1858), -1.8799, True, 'f_index.01.L'),
    ('thumb.02.L', (0.6857, 0.0015, 1.2404), (0.7056, -0.0057, 1.2145), -0.4798, True, 'thumb.01.L'),
    ('f_middle.02.L', (0.7762, 0.0234, 1.2058), (0.7851, 0.0218, 1.1749), -1.8283, True, 'f_middle.01.L'),
    ('f_ring.02.L', (0.7715, 0.0499, 1.2070), (0.7794, 0.0494, 1.1762), -1.8946, True, 'f_ring.01.L'),
    ('f_pinky.02.L', (0.7589, 0.0765, 1.2156), (0.7618, 0.0770, 1.1932), -1.9059, True, 'f_pinky.01.L'),
    ('f_index.02.R', (-0.7718, 0.0012, 1.2112), (-0.7840, -0.0003, 1.1858), 1.8799, True, 'f_index.01.R'),
    ('thumb.02.R', (-0.6857, 0.0015, 1.2404), (-0.7056, -0.0057, 1.2145), 0.4798, True, 'thumb.01.R'),
    ('f_middle.02.R', (-0.7762, 0.0233, 1.2058), (-0.7851, 0.0218, 1.1749), 1.8283, True, 'f_middle.01.R'),
    ('f_ring.02.R', (-0.7715, 0.0499, 1.2070), (-0.7794, 0.0494, 1.1762), 1.8946, True, 'f_ring.01.R'),
    ('f_pinky.02.R', (-0.7589, 0.0765, 1.2156), (-0.7618, 0.0770, 1.1932), 1.9059, True, 'f_pinky.01.R'),
    ('nose.003', (0.0000, -0.1854, 1.8402), (0.0000, -0.1706, 1.8393), 0.0000, True, 'nose.002'),
    ('ear.L.003', (0.1206, -0.0101, 1.8695), (0.1010, -0.0347, 1.8422), 0.3033, True, 'ear.L.002'),
    ('ear.R.003', (-0.1206, -0.0101, 1.8695), (-0.1010, -0.0347, 1.8422), -0.3033, True, 'ear.R.002'),
    ('brow.B.L.003', (0.0388, -0.1418, 1.9069), (0.0221, -0.1397, 1.8950), 0.1405, True, 'brow.B.L.002'),
    ('lid.T.L.003', (0.0425, -0.1427, 1.8987), (0.0262, -0.1418, 1.8891), 0.2194, True, 'lid.T.L.002'),
    ('brow.B.R.003', (-0.0388, -0.1418, 1.9069), (-0.0221, -0.1397, 1.8950), -0.1405, True, 'brow.B.R.002'),
    ('lid.T.R.003', (-0.0425, -0.1427, 1.8987), (-0.0262, -0.1418, 1.8891), -0.2194, True, 'lid.T.R.002'),
    ('temple.L', (0.0873, -0.0597, 1.9523), (0.0926, -0.0625, 1.8738), -0.0913, False, 'forehead.L.002'),
    ('temple.R', (-0.0873, -0.0597, 1.9523), (-0.0926, -0.0625, 1.8738), 0.0913, False, 'forehead.R.002'),
    ('nose.L.001', (0.0176, -0.1627, 1.8429), (0.0000, -0.1965, 1.8450), 0.1070, True, 'nose.L'),
    ('nose.R.001', (-0.0176, -0.1627, 1.8429), (-0.0000, -0.1965, 1.8450), -0.1070, True, 'nose.R'),
    ('f_index.03.L', (0.7840, -0.0003, 1.1858), (0.7892, 0.0006, 1.1636), -1.6760, True, 'f_index.02.L'),
    ('thumb.03.L', (0.7056, -0.0057, 1.2145), (0.7194, -0.0098, 1.1995), -0.5826, True, 'thumb.02.L'),
    ('f_middle.03.L', (0.7851, 0.0218, 1.1749), (0.7888, 0.0216, 1.1525), -1.7483, True, 'f_middle.02.L'),
    ('f_ring.03.L', (0.7794, 0.0494, 1.1762), (0.7781, 0.0498, 1.1577), -1.6582, True, 'f_ring.02.L'),
    ('f_pinky.03.L', (0.7618, 0.0770, 1.1932), (0.7611, 0.0772, 1.1782), -1.7639, True, 'f_pinky.02.L'),
    ('f_index.03.R', (-0.7840, -0.0003, 1.1858), (-0.7892, 0.0006, 1.1636), 1.6760, True, 'f_index.02.R'),
    ('thumb.03.R', (-0.7056, -0.0057, 1.2145), (-0.7194, -0.0098, 1.1995), 0.5826, True, 'thumb.02.R'),
    ('f_middle.03.R', (-0.7851, 0.0218, 1.1749), (-0.7888, 0.0216, 1.1525), 1.7483, True, 'f_middle.02.R'),
    ('f_ring.03.R', (-0.7794, 0.0494, 1.1762), (-0.7781, 0.0498, 1.1577), 1.6582, True, 'f_ring.02.R'),
    ('f_pinky.03.R', (-0.7618, 0.0770, 1.1932), (-0.7611, 0.0772, 1.1782), 1.7639, True, 'f_pinky.02.R'),
    ('nose.004', (0.0000, -0.1706, 1.8393), (0.0000, -0.1698, 1.8244), 0.0000, True, 'nose.003'),
    ('ear.L.004', (0.1010, -0.0347, 1.8422), (0.0919, -0.0309, 1.8622), 0.1518, True, 'ear.L.003'),
    ('ear.R.004', (-0.1010, -0.0347, 1.8422), (-0.0919, -0.0309, 1.8622), -0.1518, True, 'ear.R.003'),
    ('lid.B.L', (0.0262, -0.1418, 1.8891), (0.0393, -0.1425, 1.8854), 0.0756, True, 'lid.T.L.003'),
    ('lid.B.R', (-0.0262, -0.1418, 1.8891), (-0.0393, -0.1425, 1.8854), -0.0756, True, 'lid.T.R.003'),
    ('jaw.L', (0.0926, -0.0625, 1.8738), (0.0783, -0.0689, 1.7975), -0.0899, True, 'temple.L'),
    ('jaw.R', (-0.0926, -0.0625, 1.8738), (-0.0783, -0.0689, 1.7975), 0.0899, True, 'temple.R'),
    ('lid.B.L.001', (0.0393, -0.1425, 1.8854), (0.0553, -0.1418, 1.8833), 0.1015, True, 'lid.B.L'),
    ('lid.B.R.001', (-0.0393, -0.1425, 1.8854), (-0.0553, -0.1418, 1.8833), -0.1015, True, 'lid.B.R'),
    ('jaw.L.001', (0.0783, -0.0689, 1.7975), (0.0387, -0.1315, 1.7536), 0.1223, True, 'jaw.L'),
    ('jaw.R.001', (-0.0783, -0.0689, 1.7975), (-0.0387, -0.1315, 1.7536), -0.1223, True, 'jaw.R'),
    ('lid.B.L.002', (0.0553, -0.1418, 1.8833), (0.0694, -0.1351, 1.8889), -0.0748, True, 'lid.B.L.001'),
    ('lid.B.R.002', (-0.0553, -0.1418, 1.8833), (-0.0694, -0.1351, 1.8889), 0.0748, True, 'lid.B.R.001'),
    ('chin.L', (0.0387, -0.1315, 1.7536), (0.0352, -0.1494, 1.8074), -0.2078, True, 'jaw.L.001'),
    ('chin.R', (-0.0387, -0.1315, 1.7536), (-0.0352, -0.1494, 1.8074), 0.2078, True, 'jaw.R.001'),
    ('lid.B.L.003', (0.0694, -0.1351, 1.8889), (0.0768, -0.1218, 1.8947), -0.0085, True, 'lid.B.L.002'),
    ('lid.B.R.003', (-0.0694, -0.1351, 1.8889), (-0.0768, -0.1218, 1.8947), 0.0085, True, 'lid.B.R.002'),
    ('cheek.B.L', (0.0352, -0.1494, 1.8074), (0.0736, -0.1216, 1.8243), 0.0015, True, 'chin.L'),
    ('cheek.B.R', (-0.0352, -0.1494, 1.8074), (-0.0736, -0.1216, 1.8243), -0.0015, True, 'chin.R'),
    ('cheek.B.L.001', (0.0736, -0.1216, 1.8243), (0.0848, -0.0940, 1.8870), -0.0000, True, 'cheek.B.L'),
    ('cheek.B.R.001', (-0.0736, -0.1216, 1.8243), (-0.0848, -0.0940, 1.8870), 0.0000, True, 'cheek.B.R'),
    ('brow.T.L', (0.0848, -0.0940, 1.8870), (0.0830, -0.1213, 1.9164), 0.1990, True, 'cheek.B.L.001'),
    ('brow.T.R', (-0.0848, -0.0940, 1.8870), (-0.0830, -0.1213, 1.9164), -0.1990, True, 'cheek.B.R.001'),
    ('brow.T.L.001', (0.0830, -0.1213, 1.9164), (0.0588, -0.1421, 1.9255), 0.2372, True, 'brow.T.L'),
    ('brow.T.R.001', (-0.0830, -0.1213, 1.9164), (-0.0588, -0.1421, 1.9255), -0.2372, True, 'brow.T.R'),
    ('brow.T.L.002', (0.0588, -0.1421, 1.9255), (0.0215, -0.1546, 1.9144), 0.0724, True, 'brow.T.L.001'),
    ('brow.T.R.002', (-0.0588, -0.1421, 1.9255), (-0.0215, -0.1546, 1.9144), -0.0724, True, 'brow.T.R.001'),
    ('brow.T.L.003', (0.0215, -0.1546, 1.9144), (0.0000, -0.1536, 1.8978), -0.0423, True, 'brow.T.L.002'),
    ('brow.T.R.003', (-0.0215, -0.1546, 1.9144), (0.0000, -0.1536, 1.8978), 0.0423, True, 'brow.T.R.002'),
]
for _name, _head, _tail, _roll, _connect, _parent in _bone_rows:
    bone = arm.edit_bones.new(_name)
    bone.head[:] = _head
    bone.tail[:] = _tail
    bone.roll = _roll
    bone.use_connect = _connect
    bone.parent = arm.edit_bones[bones[_parent]]
    # Record the name Blender actually assigned (it may differ from the
    # requested name if a collision occurred) under the requested key.
    bones[_name] = bone.name
bpy.ops.object.mode_set(mode='OBJECT')
pbone = obj.pose.bones[bones['spine']]
pbone.rigify_type = 'pitchipoy.super_torso_turbo'
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
try:
pbone.rigify_parameters.chain_bone_controls = "1, 2, 3"
except AttributeError:
pass
try:
pbone.rigify_parameters.neck_pos = 5
except AttributeError:
pass
try:
pbone.rigify_parameters.tweak_layers = [False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
except AttributeError:
pass
pbone = obj.pose.bones[bones['spine.001']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['pelvis.L']]
pbone.rigify_type = 'basic.copy'
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'YXZ'
pbone.bone.layers = [False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
try:
pbone.rigify_parameters.make_control = False
except AttributeError:
pass
pbone = obj.pose.bones[bones['pelvis.R']]
pbone.rigify_type = 'basic.copy'
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'YXZ'
pbone.bone.layers = [False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
try:
pbone.rigify_parameters.make_control = False
except AttributeError:
pass
pbone = obj.pose.bones[bones['thigh.L']]
pbone.rigify_type = 'pitchipoy.limbs.super_limb'
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
try:
pbone.rigify_parameters.separate_ik_layers = True
except AttributeError:
pass
try:
pbone.rigify_parameters.ik_layers = [False, False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
except AttributeError:
pass
try:
pbone.rigify_parameters.separate_hose_layers = True
except AttributeError:
pass
try:
pbone.rigify_parameters.hose_layers = [False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
except AttributeError:
pass
try:
pbone.rigify_parameters.limb_type = "leg"
except AttributeError:
pass
try:
pbone.rigify_parameters.fk_layers = [False, False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
except AttributeError:
pass
try:
pbone.rigify_parameters.tweak_layers = [False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
except AttributeError:
pass
pbone = obj.pose.bones[bones['thigh.R']]
pbone.rigify_type = 'pitchipoy.limbs.super_limb'
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
try:
pbone.rigify_parameters.separate_ik_layers = True
except AttributeError:
pass
try:
pbone.rigify_parameters.ik_layers = [False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
except AttributeError:
pass
try:
pbone.rigify_parameters.separate_hose_layers = True
except AttributeError:
pass
try:
pbone.rigify_parameters.hose_layers = [False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False]
except AttributeError:
pass
try:
pbone.rigify_parameters.fk_layers = [False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
except AttributeError:
pass
try:
pbone.rigify_parameters.tweak_layers = [False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False]
except AttributeError:
pass
try:
pbone.rigify_parameters.limb_type = "leg"
except AttributeError:
pass
pbone = obj.pose.bones[bones['spine.002']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['shin.L']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['shin.R']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['spine.003']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['foot.L']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['foot.R']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['spine.004']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['shoulder.L']]
pbone.rigify_type = 'basic.copy'
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'YXZ'
pbone.bone.layers = [False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['shoulder.R']]
pbone.rigify_type = 'basic.copy'
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'YXZ'
pbone.bone.layers = [False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['breast.L']]
pbone.rigify_type = 'pitchipoy.super_copy'
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'YXZ'
pbone.bone.layers = [False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['breast.R']]
pbone.rigify_type = 'pitchipoy.super_copy'
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'YXZ'
pbone.bone.layers = [False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['toe.L']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['heel.02.L']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['toe.R']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['heel.02.R']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['spine.005']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['upper_arm.L']]
pbone.rigify_type = 'pitchipoy.limbs.super_limb'
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
try:
pbone.rigify_parameters.separate_ik_layers = True
except AttributeError:
pass
try:
pbone.rigify_parameters.ik_layers = [False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
except AttributeError:
pass
try:
pbone.rigify_parameters.separate_hose_layers = True
except AttributeError:
pass
try:
pbone.rigify_parameters.hose_layers = [False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
except AttributeError:
pass
try:
pbone.rigify_parameters.tweak_layers = [False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
except AttributeError:
pass
try:
pbone.rigify_parameters.fk_layers = [False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
except AttributeError:
pass
# --- upper_arm.R: right-arm limb rig (pitchipoy.limbs.super_limb) ---
# Base bone on layer 10; generated IK/FK controls on layer 11, hose and
# tweak controls on layer 12.  rigify_parameters assignments are guarded
# with try/except AttributeError since they exist only when the rig type
# is registered.
pbone = obj.pose.bones[bones['upper_arm.R']]
pbone.rigify_type = 'pitchipoy.limbs.super_limb'
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
try:
    pbone.rigify_parameters.separate_ik_layers = True
except AttributeError:
    pass
try:
    # IK controls -> layer 11.
    pbone.rigify_parameters.ik_layers = [False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
except AttributeError:
    pass
try:
    pbone.rigify_parameters.separate_hose_layers = True
except AttributeError:
    pass
try:
    # Hose controls -> layer 12.
    pbone.rigify_parameters.hose_layers = [False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
except AttributeError:
    pass
try:
    # Tweak controls -> layer 12.
    pbone.rigify_parameters.tweak_layers = [False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
except AttributeError:
    pass
try:
    # FK controls -> layer 11.
    pbone.rigify_parameters.fk_layers = [False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
except AttributeError:
    pass
# --- spine.006, forearm.L, forearm.R: plain child bones (no rig type) ---
# They inherit behaviour from their parent's rig; only layer placement and
# rotation mode are set here (spine -> layer 3, left arm -> 7, right -> 10).
pbone = obj.pose.bones[bones['spine.006']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['forearm.L']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['forearm.R']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
# --- face: root of the facial rig (pitchipoy.super_face) on layer 0 ---
# secondary_layers sends the generated secondary face controls to layer 2.
pbone = obj.pose.bones[bones['face']]
pbone.rigify_type = 'pitchipoy.super_face'
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
try:
    # Only present when the super_face rig type is registered.
    pbone.rigify_parameters.secondary_layers = [False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
except AttributeError:
    pass
# --- hand.L / hand.R: plain bones on the arm layers (L -> 7, R -> 10) ---
pbone = obj.pose.bones[bones['hand.L']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['hand.R']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
# --- Primary face bones (children of the super_face rig) ---
# All follow the same pattern: no own rig type (the face rig drives them),
# nothing locked, quaternion rotation, and placement on armature layer 0.
pbone = obj.pose.bones[bones['nose']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['lip.T.L']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['lip.B.L']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['jaw']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['ear.L']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['ear.R']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['lip.T.R']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['lip.B.R']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['brow.B.L']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['lid.T.L']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['brow.B.R']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['lid.T.R']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['forehead.L']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['forehead.R']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['eye.L']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['eye.R']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['cheek.T.L']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['cheek.T.R']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['teeth.T']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['teeth.B']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['tongue']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
# --- Palm bones: all on layer 5 with Euler YXZ rotation ---
# Only palm.01 on each side carries the pitchipoy.super_palm rig type; it
# drives the sibling palm bones (rigify_type '').
pbone = obj.pose.bones[bones['palm.01.L']]
pbone.rigify_type = 'pitchipoy.super_palm'
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'YXZ'
pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['palm.02.L']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'YXZ'
pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['palm.03.L']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'YXZ'
pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['palm.04.L']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'YXZ'
pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['palm.01.R']]
pbone.rigify_type = 'pitchipoy.super_palm'
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'YXZ'
pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['palm.02.R']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'YXZ'
pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['palm.03.R']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'YXZ'
pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['palm.04.R']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'YXZ'
pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
# --- Secondary face bones (.001 continuations plus chin) ---
# Same uniform setup as the primary face bones: no own rig type, nothing
# locked, quaternion rotation, armature layer 0.
pbone = obj.pose.bones[bones['nose.001']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['lip.T.L.001']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['lip.B.L.001']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['chin']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['ear.L.001']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['ear.R.001']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['lip.T.R.001']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['lip.B.R.001']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['brow.B.L.001']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['lid.T.L.001']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['brow.B.R.001']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['lid.T.R.001']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['forehead.L.001']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['forehead.R.001']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['cheek.T.L.001']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['cheek.T.R.001']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['tongue.001']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
# --- f_index.01.L: finger chain rigged as a simple tentacle, layer 5 ---
# extra_layers keeps the generated controls on layer 5 as well;
# tweak_extra_layers is disabled so tweaks stay with the main controls.
pbone = obj.pose.bones[bones['f_index.01.L']]
pbone.rigify_type = 'pitchipoy.simple_tentacle'
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
try:
    pbone.rigify_parameters.separate_extra_layers = True
except AttributeError:
    pass
try:
    pbone.rigify_parameters.extra_layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
except AttributeError:
    pass
try:
    pbone.rigify_parameters.tweak_extra_layers = False
except AttributeError:
    pass
# --- thumb.01.L: simple-tentacle finger rig, layer 5 ---
# Same parameters as the other fingers; note the generator emitted the
# extra_layers assignment before separate_extra_layers here (order is
# irrelevant since both are independent property writes).
pbone = obj.pose.bones[bones['thumb.01.L']]
pbone.rigify_type = 'pitchipoy.simple_tentacle'
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
try:
    pbone.rigify_parameters.extra_layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
except AttributeError:
    pass
try:
    pbone.rigify_parameters.separate_extra_layers = True
except AttributeError:
    pass
try:
    pbone.rigify_parameters.tweak_extra_layers = False
except AttributeError:
    pass
# --- f_middle.01.L: simple-tentacle finger rig, layer 5 ---
pbone = obj.pose.bones[bones['f_middle.01.L']]
pbone.rigify_type = 'pitchipoy.simple_tentacle'
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
try:
    pbone.rigify_parameters.separate_extra_layers = True
except AttributeError:
    pass
try:
    pbone.rigify_parameters.extra_layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
except AttributeError:
    pass
try:
    pbone.rigify_parameters.tweak_extra_layers = False
except AttributeError:
    pass
# --- f_ring.01.L: simple-tentacle finger rig, layer 5 ---
pbone = obj.pose.bones[bones['f_ring.01.L']]
pbone.rigify_type = 'pitchipoy.simple_tentacle'
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
try:
    pbone.rigify_parameters.separate_extra_layers = True
except AttributeError:
    pass
try:
    pbone.rigify_parameters.extra_layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
except AttributeError:
    pass
try:
    pbone.rigify_parameters.tweak_extra_layers = False
except AttributeError:
    pass
# --- f_pinky.01.L: simple-tentacle finger rig, layer 5 ---
pbone = obj.pose.bones[bones['f_pinky.01.L']]
pbone.rigify_type = 'pitchipoy.simple_tentacle'
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
try:
    pbone.rigify_parameters.separate_extra_layers = True
except AttributeError:
    pass
try:
    pbone.rigify_parameters.extra_layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
except AttributeError:
    pass
try:
    pbone.rigify_parameters.tweak_extra_layers = False
except AttributeError:
    pass
pbone = obj.pose.bones[bones['f_index.01.R']]
pbone.rigify_type = 'pitchipoy.simple_tentacle'
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
try:
pbone.rigify_parameters.extra_layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
except AttributeError:
pass
try:
pbone.rigify_parameters.separate_extra_layers = True
except AttributeError:
pass
try:
pbone.rigify_parameters.tweak_extra_layers = False
except AttributeError:
pass
pbone = obj.pose.bones[bones['thumb.01.R']]
pbone.rigify_type = 'pitchipoy.simple_tentacle'
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
try:
pbone.rigify_parameters.separate_extra_layers = True
except AttributeError:
pass
try:
pbone.rigify_parameters.extra_layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
except AttributeError:
pass
try:
pbone.rigify_parameters.tweak_extra_layers = False
except AttributeError:
pass
pbone = obj.pose.bones[bones['f_middle.01.R']]
pbone.rigify_type = 'pitchipoy.simple_tentacle'
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
try:
pbone.rigify_parameters.separate_extra_layers = True
except AttributeError:
pass
try:
pbone.rigify_parameters.extra_layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
except AttributeError:
pass
try:
pbone.rigify_parameters.tweak_extra_layers = False
except AttributeError:
pass
pbone = obj.pose.bones[bones['f_ring.01.R']]
pbone.rigify_type = 'pitchipoy.simple_tentacle'
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
try:
pbone.rigify_parameters.separate_extra_layers = True
except AttributeError:
pass
try:
pbone.rigify_parameters.extra_layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
except AttributeError:
pass
try:
pbone.rigify_parameters.tweak_extra_layers = False
except AttributeError:
pass
pbone = obj.pose.bones[bones['f_pinky.01.R']]
pbone.rigify_type = 'pitchipoy.simple_tentacle'
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
try:
pbone.rigify_parameters.separate_extra_layers = True
except AttributeError:
pass
try:
pbone.rigify_parameters.extra_layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
except AttributeError:
pass
try:
pbone.rigify_parameters.tweak_extra_layers = False
except AttributeError:
pass
# Bones with no rigify type: identical property setup differing only in the
# single armature layer that is enabled (0 = face bones, 5 = finger phalanges),
# so drive it from a (name, layer) table instead of 57 repeated stanzas.
# Iteration order matches the original generated code, so `pbone` ends up
# referring to the same bone ('lid.B.R.002') afterwards.
for _bone_name, _layer in (
    ('nose.002', 0), ('chin.001', 0), ('ear.L.002', 0), ('ear.R.002', 0),
    ('brow.B.L.002', 0), ('lid.T.L.002', 0), ('brow.B.R.002', 0),
    ('lid.T.R.002', 0), ('forehead.L.002', 0), ('forehead.R.002', 0),
    ('nose.L', 0), ('nose.R', 0), ('tongue.002', 0),
    ('f_index.02.L', 5), ('thumb.02.L', 5), ('f_middle.02.L', 5),
    ('f_ring.02.L', 5), ('f_pinky.02.L', 5),
    ('f_index.02.R', 5), ('thumb.02.R', 5), ('f_middle.02.R', 5),
    ('f_ring.02.R', 5), ('f_pinky.02.R', 5),
    ('nose.003', 0), ('ear.L.003', 0), ('ear.R.003', 0),
    ('brow.B.L.003', 0), ('lid.T.L.003', 0), ('brow.B.R.003', 0),
    ('lid.T.R.003', 0), ('temple.L', 0), ('temple.R', 0),
    ('nose.L.001', 0), ('nose.R.001', 0),
    ('f_index.03.L', 5), ('thumb.03.L', 5), ('f_middle.03.L', 5),
    ('f_ring.03.L', 5), ('f_pinky.03.L', 5),
    ('f_index.03.R', 5), ('thumb.03.R', 5), ('f_middle.03.R', 5),
    ('f_ring.03.R', 5), ('f_pinky.03.R', 5),
    ('nose.004', 0), ('ear.L.004', 0), ('ear.R.004', 0),
    ('lid.B.L', 0), ('lid.B.R', 0), ('jaw.L', 0), ('jaw.R', 0),
    ('lid.B.L.001', 0), ('lid.B.R.001', 0), ('jaw.L.001', 0),
    ('jaw.R.001', 0), ('lid.B.L.002', 0), ('lid.B.R.002', 0),
):
    pbone = obj.pose.bones[bones[_bone_name]]
    pbone.rigify_type = ''
    pbone.lock_location = (False, False, False)
    pbone.lock_rotation = (False, False, False)
    pbone.lock_rotation_w = False
    pbone.lock_scale = (False, False, False)
    pbone.rotation_mode = 'QUATERNION'
    # Enable exactly one of the 32 armature layers for this bone.
    pbone.bone.layers = [i == _layer for i in range(32)]
pbone = obj.pose.bones[bones['chin.L']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['chin.R']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['lid.B.L.003']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['lid.B.R.003']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['cheek.B.L']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['cheek.B.R']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['cheek.B.L.001']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['cheek.B.R.001']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['brow.T.L']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['brow.T.R']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['brow.T.L.001']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['brow.T.R.001']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['brow.T.L.002']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['brow.T.R.002']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['brow.T.L.003']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
pbone = obj.pose.bones[bones['brow.T.R.003']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False
pbone.lock_scale = (False, False, False)
pbone.rotation_mode = 'QUATERNION'
pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
bpy.ops.object.mode_set(mode='EDIT')
for bone in arm.edit_bones:
bone.select = False
bone.select_head = False
bone.select_tail = False
for b in bones:
bone = arm.edit_bones[bones[b]]
bone.select = True
bone.select_head = True
bone.select_tail = True
arm.edit_bones.active = bone
arm.layers = [(x in [0, 3, 5, 7, 10, 13, 16]) for x in range(32)]
# Script entry point: when this generated metarig script is run directly inside
# Blender, build the rig on whatever object is currently active in the scene.
if __name__ == "__main__":
    create(bpy.context.active_object)
| 52.902242
| 274
| 0.660055
| 21,053
| 143,947
| 4.42108
| 0.023085
| 0.695016
| 0.920206
| 1.064065
| 0.980822
| 0.96317
| 0.959399
| 0.952588
| 0.94251
| 0.921527
| 0
| 0.057116
| 0.184832
| 143,947
| 2,720
| 275
| 52.921691
| 0.736102
| 0.000271
| 0
| 0.59631
| 1
| 0
| 0.056023
| 0.002648
| 0
| 0
| 0
| 0
| 0
| 1
| 0.000369
| false
| 0.022878
| 0.000369
| 0
| 0.000738
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
985fa76475386726c82455e25a3901d6a6756e78
| 44,848
|
py
|
Python
|
sdk/python/pulumi_azure/webpubsub/service.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure/webpubsub/service.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure/webpubsub/service.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['ServiceArgs', 'Service']
@pulumi.input_type
class ServiceArgs:
    """Input arguments for creating a Web PubSub ``Service`` resource.

    NOTE: this class is machine-generated by the Pulumi Terraform bridge and is
    introspected by the ``@pulumi.input_type`` decorator — the getter/setter
    structure and ``@pulumi.getter(name=...)`` metadata must not be reshaped by
    hand.
    """
    def __init__(__self__, *,
                 resource_group_name: pulumi.Input[str],
                 sku: pulumi.Input[str],
                 aad_auth_enabled: Optional[pulumi.Input[bool]] = None,
                 capacity: Optional[pulumi.Input[int]] = None,
                 identity: Optional[pulumi.Input['ServiceIdentityArgs']] = None,
                 live_trace: Optional[pulumi.Input['ServiceLiveTraceArgs']] = None,
                 local_auth_enabled: Optional[pulumi.Input[bool]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 public_network_access_enabled: Optional[pulumi.Input[bool]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 tls_client_cert_enabled: Optional[pulumi.Input[bool]] = None):
        """
        The set of arguments for constructing a Service resource.
        :param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the Web Pubsub service. Changing
               this forces a new resource to be created.
        :param pulumi.Input[str] sku: Specifies which sku to use. Possible values are `Free_F1` and `Standard_S1`.
        :param pulumi.Input[bool] aad_auth_enabled: Whether to enable AAD auth? Defaults to `true`.
        :param pulumi.Input[int] capacity: Specifies the number of units associated with this Web Pubsub resource. Valid values are:
               Free: `1`, Standard: `1`, `2`, `5`, `10`, `20`, `50`, `100`.
        :param pulumi.Input['ServiceIdentityArgs'] identity: An `identity` block as defined below.
        :param pulumi.Input['ServiceLiveTraceArgs'] live_trace: A `live_trace` block as defined below.
        :param pulumi.Input[bool] local_auth_enabled: Whether to enable local auth? Defaults to `true`.
        :param pulumi.Input[str] location: Specifies the supported Azure location where the Web Pubsub service exists. Changing this
               forces a new resource to be created.
        :param pulumi.Input[str] name: The name of the Web Pubsub service. Changing this forces a new resource to be created.
        :param pulumi.Input[bool] public_network_access_enabled: Whether to enable public network access? Defaults to `true`.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        :param pulumi.Input[bool] tls_client_cert_enabled: Whether to request client certificate during TLS handshake? Defaults
               to `false`.
        """
        # Required arguments are always stored.
        pulumi.set(__self__, "resource_group_name", resource_group_name)
        pulumi.set(__self__, "sku", sku)
        # Optional arguments are stored only when explicitly provided, so that
        # Pulumi can distinguish "unset" from an explicit value.
        if aad_auth_enabled is not None:
            pulumi.set(__self__, "aad_auth_enabled", aad_auth_enabled)
        if capacity is not None:
            pulumi.set(__self__, "capacity", capacity)
        if identity is not None:
            pulumi.set(__self__, "identity", identity)
        if live_trace is not None:
            pulumi.set(__self__, "live_trace", live_trace)
        if local_auth_enabled is not None:
            pulumi.set(__self__, "local_auth_enabled", local_auth_enabled)
        if location is not None:
            pulumi.set(__self__, "location", location)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if public_network_access_enabled is not None:
            pulumi.set(__self__, "public_network_access_enabled", public_network_access_enabled)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
        if tls_client_cert_enabled is not None:
            pulumi.set(__self__, "tls_client_cert_enabled", tls_client_cert_enabled)

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Input[str]:
        """
        The name of the resource group in which to create the Web Pubsub service. Changing
        this forces a new resource to be created.
        """
        return pulumi.get(self, "resource_group_name")

    @resource_group_name.setter
    def resource_group_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "resource_group_name", value)

    @property
    @pulumi.getter
    def sku(self) -> pulumi.Input[str]:
        """
        Specifies which sku to use. Possible values are `Free_F1` and `Standard_S1`.
        """
        return pulumi.get(self, "sku")

    @sku.setter
    def sku(self, value: pulumi.Input[str]):
        pulumi.set(self, "sku", value)

    @property
    @pulumi.getter(name="aadAuthEnabled")
    def aad_auth_enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether to enable AAD auth? Defaults to `true`.
        """
        return pulumi.get(self, "aad_auth_enabled")

    @aad_auth_enabled.setter
    def aad_auth_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "aad_auth_enabled", value)

    @property
    @pulumi.getter
    def capacity(self) -> Optional[pulumi.Input[int]]:
        """
        Specifies the number of units associated with this Web Pubsub resource. Valid values are:
        Free: `1`, Standard: `1`, `2`, `5`, `10`, `20`, `50`, `100`.
        """
        return pulumi.get(self, "capacity")

    @capacity.setter
    def capacity(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "capacity", value)

    @property
    @pulumi.getter
    def identity(self) -> Optional[pulumi.Input['ServiceIdentityArgs']]:
        """
        An `identity` block as defined below.
        """
        return pulumi.get(self, "identity")

    @identity.setter
    def identity(self, value: Optional[pulumi.Input['ServiceIdentityArgs']]):
        pulumi.set(self, "identity", value)

    @property
    @pulumi.getter(name="liveTrace")
    def live_trace(self) -> Optional[pulumi.Input['ServiceLiveTraceArgs']]:
        """
        A `live_trace` block as defined below.
        """
        return pulumi.get(self, "live_trace")

    @live_trace.setter
    def live_trace(self, value: Optional[pulumi.Input['ServiceLiveTraceArgs']]):
        pulumi.set(self, "live_trace", value)

    @property
    @pulumi.getter(name="localAuthEnabled")
    def local_auth_enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether to enable local auth? Defaults to `true`.
        """
        return pulumi.get(self, "local_auth_enabled")

    @local_auth_enabled.setter
    def local_auth_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "local_auth_enabled", value)

    @property
    @pulumi.getter
    def location(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the supported Azure location where the Web Pubsub service exists. Changing this
        forces a new resource to be created.
        """
        return pulumi.get(self, "location")

    @location.setter
    def location(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "location", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the Web Pubsub service. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="publicNetworkAccessEnabled")
    def public_network_access_enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether to enable public network access? Defaults to `true`.
        """
        return pulumi.get(self, "public_network_access_enabled")

    @public_network_access_enabled.setter
    def public_network_access_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "public_network_access_enabled", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A mapping of tags to assign to the resource.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)

    @property
    @pulumi.getter(name="tlsClientCertEnabled")
    def tls_client_cert_enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether to request client certificate during TLS handshake? Defaults
        to `false`.
        """
        return pulumi.get(self, "tls_client_cert_enabled")

    @tls_client_cert_enabled.setter
    def tls_client_cert_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "tls_client_cert_enabled", value)
@pulumi.input_type
class _ServiceState:
    """Input properties used to look up / filter existing ``Service`` resources.

    Unlike ``ServiceArgs``, this state class also carries provider outputs
    (access keys, connection strings, ports, hostname), so every field is
    optional. Machine-generated by the Pulumi Terraform bridge and introspected
    by ``@pulumi.input_type`` — do not reshape the getter/setter structure.
    """
    def __init__(__self__, *,
                 aad_auth_enabled: Optional[pulumi.Input[bool]] = None,
                 capacity: Optional[pulumi.Input[int]] = None,
                 external_ip: Optional[pulumi.Input[str]] = None,
                 hostname: Optional[pulumi.Input[str]] = None,
                 identity: Optional[pulumi.Input['ServiceIdentityArgs']] = None,
                 live_trace: Optional[pulumi.Input['ServiceLiveTraceArgs']] = None,
                 local_auth_enabled: Optional[pulumi.Input[bool]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 primary_access_key: Optional[pulumi.Input[str]] = None,
                 primary_connection_string: Optional[pulumi.Input[str]] = None,
                 public_network_access_enabled: Optional[pulumi.Input[bool]] = None,
                 public_port: Optional[pulumi.Input[int]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 secondary_access_key: Optional[pulumi.Input[str]] = None,
                 secondary_connection_string: Optional[pulumi.Input[str]] = None,
                 server_port: Optional[pulumi.Input[int]] = None,
                 sku: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 tls_client_cert_enabled: Optional[pulumi.Input[bool]] = None,
                 version: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering Service resources.
        :param pulumi.Input[bool] aad_auth_enabled: Whether to enable AAD auth? Defaults to `true`.
        :param pulumi.Input[int] capacity: Specifies the number of units associated with this Web Pubsub resource. Valid values are:
               Free: `1`, Standard: `1`, `2`, `5`, `10`, `20`, `50`, `100`.
        :param pulumi.Input[str] hostname: The FQDN of the Web Pubsub service.
        :param pulumi.Input['ServiceIdentityArgs'] identity: An `identity` block as defined below.
        :param pulumi.Input['ServiceLiveTraceArgs'] live_trace: A `live_trace` block as defined below.
        :param pulumi.Input[bool] local_auth_enabled: Whether to enable local auth? Defaults to `true`.
        :param pulumi.Input[str] location: Specifies the supported Azure location where the Web Pubsub service exists. Changing this
               forces a new resource to be created.
        :param pulumi.Input[str] name: The name of the Web Pubsub service. Changing this forces a new resource to be created.
        :param pulumi.Input[str] primary_access_key: The primary access key for the Web Pubsub service.
        :param pulumi.Input[str] primary_connection_string: The primary connection string for the Web Pubsub service.
        :param pulumi.Input[bool] public_network_access_enabled: Whether to enable public network access? Defaults to `true`.
        :param pulumi.Input[int] public_port: The publicly accessible port of the Web Pubsub service which is designed for browser/client use.
        :param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the Web Pubsub service. Changing
               this forces a new resource to be created.
        :param pulumi.Input[str] secondary_access_key: The secondary access key for the Web Pubsub service.
        :param pulumi.Input[str] secondary_connection_string: The secondary connection string for the Web Pubsub service.
        :param pulumi.Input[int] server_port: The publicly accessible port of the Web Pubsub service which is designed for customer server side use.
        :param pulumi.Input[str] sku: Specifies which sku to use. Possible values are `Free_F1` and `Standard_S1`.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        :param pulumi.Input[bool] tls_client_cert_enabled: Whether to request client certificate during TLS handshake? Defaults
               to `false`.
        """
        # NOTE(review): `external_ip` and `version` carry no :param docs in the
        # generated schema; presumably provider outputs — confirm against the
        # azurerm provider documentation.
        # Every field is optional; only explicitly provided values are stored,
        # so Pulumi can distinguish "unset" from an explicit value.
        if aad_auth_enabled is not None:
            pulumi.set(__self__, "aad_auth_enabled", aad_auth_enabled)
        if capacity is not None:
            pulumi.set(__self__, "capacity", capacity)
        if external_ip is not None:
            pulumi.set(__self__, "external_ip", external_ip)
        if hostname is not None:
            pulumi.set(__self__, "hostname", hostname)
        if identity is not None:
            pulumi.set(__self__, "identity", identity)
        if live_trace is not None:
            pulumi.set(__self__, "live_trace", live_trace)
        if local_auth_enabled is not None:
            pulumi.set(__self__, "local_auth_enabled", local_auth_enabled)
        if location is not None:
            pulumi.set(__self__, "location", location)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if primary_access_key is not None:
            pulumi.set(__self__, "primary_access_key", primary_access_key)
        if primary_connection_string is not None:
            pulumi.set(__self__, "primary_connection_string", primary_connection_string)
        if public_network_access_enabled is not None:
            pulumi.set(__self__, "public_network_access_enabled", public_network_access_enabled)
        if public_port is not None:
            pulumi.set(__self__, "public_port", public_port)
        if resource_group_name is not None:
            pulumi.set(__self__, "resource_group_name", resource_group_name)
        if secondary_access_key is not None:
            pulumi.set(__self__, "secondary_access_key", secondary_access_key)
        if secondary_connection_string is not None:
            pulumi.set(__self__, "secondary_connection_string", secondary_connection_string)
        if server_port is not None:
            pulumi.set(__self__, "server_port", server_port)
        if sku is not None:
            pulumi.set(__self__, "sku", sku)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
        if tls_client_cert_enabled is not None:
            pulumi.set(__self__, "tls_client_cert_enabled", tls_client_cert_enabled)
        if version is not None:
            pulumi.set(__self__, "version", version)

    @property
    @pulumi.getter(name="aadAuthEnabled")
    def aad_auth_enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether to enable AAD auth? Defaults to `true`.
        """
        return pulumi.get(self, "aad_auth_enabled")

    @aad_auth_enabled.setter
    def aad_auth_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "aad_auth_enabled", value)

    @property
    @pulumi.getter
    def capacity(self) -> Optional[pulumi.Input[int]]:
        """
        Specifies the number of units associated with this Web Pubsub resource. Valid values are:
        Free: `1`, Standard: `1`, `2`, `5`, `10`, `20`, `50`, `100`.
        """
        return pulumi.get(self, "capacity")

    @capacity.setter
    def capacity(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "capacity", value)

    @property
    @pulumi.getter(name="externalIp")
    def external_ip(self) -> Optional[pulumi.Input[str]]:
        # NOTE(review): undocumented in the generated schema; presumably the
        # service's external IP as reported by the provider — confirm.
        return pulumi.get(self, "external_ip")

    @external_ip.setter
    def external_ip(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "external_ip", value)

    @property
    @pulumi.getter
    def hostname(self) -> Optional[pulumi.Input[str]]:
        """
        The FQDN of the Web Pubsub service.
        """
        return pulumi.get(self, "hostname")

    @hostname.setter
    def hostname(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "hostname", value)

    @property
    @pulumi.getter
    def identity(self) -> Optional[pulumi.Input['ServiceIdentityArgs']]:
        """
        An `identity` block as defined below.
        """
        return pulumi.get(self, "identity")

    @identity.setter
    def identity(self, value: Optional[pulumi.Input['ServiceIdentityArgs']]):
        pulumi.set(self, "identity", value)

    @property
    @pulumi.getter(name="liveTrace")
    def live_trace(self) -> Optional[pulumi.Input['ServiceLiveTraceArgs']]:
        """
        A `live_trace` block as defined below.
        """
        return pulumi.get(self, "live_trace")

    @live_trace.setter
    def live_trace(self, value: Optional[pulumi.Input['ServiceLiveTraceArgs']]):
        pulumi.set(self, "live_trace", value)

    @property
    @pulumi.getter(name="localAuthEnabled")
    def local_auth_enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether to enable local auth? Defaults to `true`.
        """
        return pulumi.get(self, "local_auth_enabled")

    @local_auth_enabled.setter
    def local_auth_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "local_auth_enabled", value)

    @property
    @pulumi.getter
    def location(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the supported Azure location where the Web Pubsub service exists. Changing this
        forces a new resource to be created.
        """
        return pulumi.get(self, "location")

    @location.setter
    def location(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "location", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the Web Pubsub service. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="primaryAccessKey")
    def primary_access_key(self) -> Optional[pulumi.Input[str]]:
        """
        The primary access key for the Web Pubsub service.
        """
        return pulumi.get(self, "primary_access_key")

    @primary_access_key.setter
    def primary_access_key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "primary_access_key", value)

    @property
    @pulumi.getter(name="primaryConnectionString")
    def primary_connection_string(self) -> Optional[pulumi.Input[str]]:
        """
        The primary connection string for the Web Pubsub service.
        """
        return pulumi.get(self, "primary_connection_string")

    @primary_connection_string.setter
    def primary_connection_string(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "primary_connection_string", value)

    @property
    @pulumi.getter(name="publicNetworkAccessEnabled")
    def public_network_access_enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether to enable public network access? Defaults to `true`.
        """
        return pulumi.get(self, "public_network_access_enabled")

    @public_network_access_enabled.setter
    def public_network_access_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "public_network_access_enabled", value)

    @property
    @pulumi.getter(name="publicPort")
    def public_port(self) -> Optional[pulumi.Input[int]]:
        """
        The publicly accessible port of the Web Pubsub service which is designed for browser/client use.
        """
        return pulumi.get(self, "public_port")

    @public_port.setter
    def public_port(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "public_port", value)

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the resource group in which to create the Web Pubsub service. Changing
        this forces a new resource to be created.
        """
        return pulumi.get(self, "resource_group_name")

    @resource_group_name.setter
    def resource_group_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "resource_group_name", value)

    @property
    @pulumi.getter(name="secondaryAccessKey")
    def secondary_access_key(self) -> Optional[pulumi.Input[str]]:
        """
        The secondary access key for the Web Pubsub service.
        """
        return pulumi.get(self, "secondary_access_key")

    @secondary_access_key.setter
    def secondary_access_key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "secondary_access_key", value)

    @property
    @pulumi.getter(name="secondaryConnectionString")
    def secondary_connection_string(self) -> Optional[pulumi.Input[str]]:
        """
        The secondary connection string for the Web Pubsub service.
        """
        return pulumi.get(self, "secondary_connection_string")

    @secondary_connection_string.setter
    def secondary_connection_string(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "secondary_connection_string", value)

    @property
    @pulumi.getter(name="serverPort")
    def server_port(self) -> Optional[pulumi.Input[int]]:
        """
        The publicly accessible port of the Web Pubsub service which is designed for customer server side use.
        """
        return pulumi.get(self, "server_port")

    @server_port.setter
    def server_port(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "server_port", value)

    @property
    @pulumi.getter
    def sku(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies which sku to use. Possible values are `Free_F1` and `Standard_S1`.
        """
        return pulumi.get(self, "sku")

    @sku.setter
    def sku(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "sku", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A mapping of tags to assign to the resource.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)

    @property
    @pulumi.getter(name="tlsClientCertEnabled")
    def tls_client_cert_enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether to request client certificate during TLS handshake? Defaults
        to `false`.
        """
        return pulumi.get(self, "tls_client_cert_enabled")

    @tls_client_cert_enabled.setter
    def tls_client_cert_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "tls_client_cert_enabled", value)

    @property
    @pulumi.getter
    def version(self) -> Optional[pulumi.Input[str]]:
        # NOTE(review): undocumented in the generated schema; presumably the
        # deployed service version reported by the provider — confirm.
        return pulumi.get(self, "version")

    @version.setter
    def version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "version", value)
class Service(pulumi.CustomResource):
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 aad_auth_enabled: Optional[pulumi.Input[bool]] = None,
                 capacity: Optional[pulumi.Input[int]] = None,
                 identity: Optional[pulumi.Input[pulumi.InputType['ServiceIdentityArgs']]] = None,
                 live_trace: Optional[pulumi.Input[pulumi.InputType['ServiceLiveTraceArgs']]] = None,
                 local_auth_enabled: Optional[pulumi.Input[bool]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 public_network_access_enabled: Optional[pulumi.Input[bool]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 sku: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 tls_client_cert_enabled: Optional[pulumi.Input[bool]] = None,
                 __props__=None):
        """
        Manages an Azure Web Pubsub Service.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_azure as azure

        example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="east us")
        example_service = azure.webpubsub.Service("exampleService",
            location=example_resource_group.location,
            resource_group_name=example_resource_group.name,
            sku="Standard_S1",
            capacity=1,
            public_network_access_enabled=False,
            live_trace=azure.webpubsub.ServiceLiveTraceArgs(
                enabled=True,
                messaging_logs_enabled=True,
                connectivity_logs_enabled=False,
            ),
            identity=azure.webpubsub.ServiceIdentityArgs(
                type="SystemAssigned",
            ))
        ```

        ## Import

        Web Pubsub services can be imported using the `resource id`, e.g.

        ```sh
         $ pulumi import azure:webpubsub/service:Service example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.SignalRService/webPubSub/pubsub1
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[bool] aad_auth_enabled: Whether to enable AAD auth? Defaults to `true`.
        :param pulumi.Input[int] capacity: Specifies the number of units associated with this Web Pubsub resource. Valid values are:
               Free: `1`, Standard: `1`, `2`, `5`, `10`, `20`, `50`, `100`.
        :param pulumi.Input[pulumi.InputType['ServiceIdentityArgs']] identity: An `identity` block as defined below.
        :param pulumi.Input[pulumi.InputType['ServiceLiveTraceArgs']] live_trace: A `live_trace` block as defined below.
        :param pulumi.Input[bool] local_auth_enabled: Whether to enable local auth? Defaults to `true`.
        :param pulumi.Input[str] location: Specifies the supported Azure location where the Web Pubsub service exists. Changing this
               forces a new resource to be created.
        :param pulumi.Input[str] name: The name of the Web Pubsub service. Changing this forces a new resource to be created.
        :param pulumi.Input[bool] public_network_access_enabled: Whether to enable public network access? Defaults to `true`.
        :param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the Web Pubsub service. Changing
               this forces a new resource to be created.
        :param pulumi.Input[str] sku: Specifies which sku to use. Possible values are `Free_F1` and `Standard_S1`.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        :param pulumi.Input[bool] tls_client_cert_enabled: Whether to request client certificate during TLS handshake? Defaults
               to `false`.
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: ServiceArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Manages an Azure Web Pubsub Service.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_azure as azure

        example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="east us")
        example_service = azure.webpubsub.Service("exampleService",
            location=example_resource_group.location,
            resource_group_name=example_resource_group.name,
            sku="Standard_S1",
            capacity=1,
            public_network_access_enabled=False,
            live_trace=azure.webpubsub.ServiceLiveTraceArgs(
                enabled=True,
                messaging_logs_enabled=True,
                connectivity_logs_enabled=False,
            ),
            identity=azure.webpubsub.ServiceIdentityArgs(
                type="SystemAssigned",
            ))
        ```

        ## Import

        Web Pubsub services can be imported using the `resource id`, e.g.

        ```sh
         $ pulumi import azure:webpubsub/service:Service example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.SignalRService/webPubSub/pubsub1
        ```

        :param str resource_name: The name of the resource.
        :param ServiceArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: callers pass either a single
        # ServiceArgs bundle or individual keyword arguments.
        resource_args, opts = _utilities.get_resource_args_opts(ServiceArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       aad_auth_enabled: Optional[pulumi.Input[bool]] = None,
                       capacity: Optional[pulumi.Input[int]] = None,
                       identity: Optional[pulumi.Input[pulumi.InputType['ServiceIdentityArgs']]] = None,
                       live_trace: Optional[pulumi.Input[pulumi.InputType['ServiceLiveTraceArgs']]] = None,
                       local_auth_enabled: Optional[pulumi.Input[bool]] = None,
                       location: Optional[pulumi.Input[str]] = None,
                       name: Optional[pulumi.Input[str]] = None,
                       public_network_access_enabled: Optional[pulumi.Input[bool]] = None,
                       resource_group_name: Optional[pulumi.Input[str]] = None,
                       sku: Optional[pulumi.Input[str]] = None,
                       tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                       tls_client_cert_enabled: Optional[pulumi.Input[bool]] = None,
                       __props__=None):
        # Shared implementation backing both __init__ overloads.  Validates the
        # options, fills in the input property bag and registers the resource
        # with the Pulumi engine.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # No opts.id means we are creating a new resource (as opposed to
            # looking up an existing one), so a pre-built __props__ is invalid.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = ServiceArgs.__new__(ServiceArgs)

            __props__.__dict__["aad_auth_enabled"] = aad_auth_enabled
            __props__.__dict__["capacity"] = capacity
            __props__.__dict__["identity"] = identity
            __props__.__dict__["live_trace"] = live_trace
            __props__.__dict__["local_auth_enabled"] = local_auth_enabled
            __props__.__dict__["location"] = location
            __props__.__dict__["name"] = name
            __props__.__dict__["public_network_access_enabled"] = public_network_access_enabled
            # resource_group_name and sku are the only required inputs; the
            # check is skipped when opts.urn is set (engine-driven construction).
            if resource_group_name is None and not opts.urn:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__.__dict__["resource_group_name"] = resource_group_name
            if sku is None and not opts.urn:
                raise TypeError("Missing required property 'sku'")
            __props__.__dict__["sku"] = sku
            __props__.__dict__["tags"] = tags
            __props__.__dict__["tls_client_cert_enabled"] = tls_client_cert_enabled
            # Output-only attributes: not settable at create time, initialized
            # to None here.
            __props__.__dict__["external_ip"] = None
            __props__.__dict__["hostname"] = None
            __props__.__dict__["primary_access_key"] = None
            __props__.__dict__["primary_connection_string"] = None
            __props__.__dict__["public_port"] = None
            __props__.__dict__["secondary_access_key"] = None
            __props__.__dict__["secondary_connection_string"] = None
            __props__.__dict__["server_port"] = None
            __props__.__dict__["version"] = None
        super(Service, __self__).__init__(
            'azure:webpubsub/service:Service',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            aad_auth_enabled: Optional[pulumi.Input[bool]] = None,
            capacity: Optional[pulumi.Input[int]] = None,
            external_ip: Optional[pulumi.Input[str]] = None,
            hostname: Optional[pulumi.Input[str]] = None,
            identity: Optional[pulumi.Input[pulumi.InputType['ServiceIdentityArgs']]] = None,
            live_trace: Optional[pulumi.Input[pulumi.InputType['ServiceLiveTraceArgs']]] = None,
            local_auth_enabled: Optional[pulumi.Input[bool]] = None,
            location: Optional[pulumi.Input[str]] = None,
            name: Optional[pulumi.Input[str]] = None,
            primary_access_key: Optional[pulumi.Input[str]] = None,
            primary_connection_string: Optional[pulumi.Input[str]] = None,
            public_network_access_enabled: Optional[pulumi.Input[bool]] = None,
            public_port: Optional[pulumi.Input[int]] = None,
            resource_group_name: Optional[pulumi.Input[str]] = None,
            secondary_access_key: Optional[pulumi.Input[str]] = None,
            secondary_connection_string: Optional[pulumi.Input[str]] = None,
            server_port: Optional[pulumi.Input[int]] = None,
            sku: Optional[pulumi.Input[str]] = None,
            tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
            tls_client_cert_enabled: Optional[pulumi.Input[bool]] = None,
            version: Optional[pulumi.Input[str]] = None) -> 'Service':
        """
        Get an existing Service resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[bool] aad_auth_enabled: Whether to enable AAD auth? Defaults to `true`.
        :param pulumi.Input[int] capacity: Specifies the number of units associated with this Web Pubsub resource. Valid values are:
               Free: `1`, Standard: `1`, `2`, `5`, `10`, `20`, `50`, `100`.
        :param pulumi.Input[str] hostname: The FQDN of the Web Pubsub service.
        :param pulumi.Input[pulumi.InputType['ServiceIdentityArgs']] identity: An `identity` block as defined below.
        :param pulumi.Input[pulumi.InputType['ServiceLiveTraceArgs']] live_trace: A `live_trace` block as defined below.
        :param pulumi.Input[bool] local_auth_enabled: Whether to enable local auth? Defaults to `true`.
        :param pulumi.Input[str] location: Specifies the supported Azure location where the Web Pubsub service exists. Changing this
               forces a new resource to be created.
        :param pulumi.Input[str] name: The name of the Web Pubsub service. Changing this forces a new resource to be created.
        :param pulumi.Input[str] primary_access_key: The primary access key for the Web Pubsub service.
        :param pulumi.Input[str] primary_connection_string: The primary connection string for the Web Pubsub service.
        :param pulumi.Input[bool] public_network_access_enabled: Whether to enable public network access? Defaults to `true`.
        :param pulumi.Input[int] public_port: The publicly accessible port of the Web Pubsub service which is designed for browser/client use.
        :param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the Web Pubsub service. Changing
               this forces a new resource to be created.
        :param pulumi.Input[str] secondary_access_key: The secondary access key for the Web Pubsub service.
        :param pulumi.Input[str] secondary_connection_string: The secondary connection string for the Web Pubsub service.
        :param pulumi.Input[int] server_port: The publicly accessible port of the Web Pubsub service which is designed for customer server side use.
        :param pulumi.Input[str] sku: Specifies which sku to use. Possible values are `Free_F1` and `Standard_S1`.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        :param pulumi.Input[bool] tls_client_cert_enabled: Whether to request client certificate during TLS handshake? Defaults
               to `false`.
        """
        # Attach the provider id so the engine performs a lookup instead of a create.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _ServiceState.__new__(_ServiceState)

        __props__.__dict__["aad_auth_enabled"] = aad_auth_enabled
        __props__.__dict__["capacity"] = capacity
        __props__.__dict__["external_ip"] = external_ip
        __props__.__dict__["hostname"] = hostname
        __props__.__dict__["identity"] = identity
        __props__.__dict__["live_trace"] = live_trace
        __props__.__dict__["local_auth_enabled"] = local_auth_enabled
        __props__.__dict__["location"] = location
        __props__.__dict__["name"] = name
        __props__.__dict__["primary_access_key"] = primary_access_key
        __props__.__dict__["primary_connection_string"] = primary_connection_string
        __props__.__dict__["public_network_access_enabled"] = public_network_access_enabled
        __props__.__dict__["public_port"] = public_port
        __props__.__dict__["resource_group_name"] = resource_group_name
        __props__.__dict__["secondary_access_key"] = secondary_access_key
        __props__.__dict__["secondary_connection_string"] = secondary_connection_string
        __props__.__dict__["server_port"] = server_port
        __props__.__dict__["sku"] = sku
        __props__.__dict__["tags"] = tags
        __props__.__dict__["tls_client_cert_enabled"] = tls_client_cert_enabled
        __props__.__dict__["version"] = version
        return Service(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="aadAuthEnabled")
    def aad_auth_enabled(self) -> pulumi.Output[Optional[bool]]:
        """
        Whether to enable AAD auth? Defaults to `true`.
        """
        return pulumi.get(self, "aad_auth_enabled")

    @property
    @pulumi.getter
    def capacity(self) -> pulumi.Output[Optional[int]]:
        """
        Specifies the number of units associated with this Web Pubsub resource. Valid values are:
        Free: `1`, Standard: `1`, `2`, `5`, `10`, `20`, `50`, `100`.
        """
        return pulumi.get(self, "capacity")

    @property
    @pulumi.getter(name="externalIp")
    def external_ip(self) -> pulumi.Output[str]:
        # NOTE(review): undocumented upstream; presumably the externally
        # reachable IP address of the Web Pubsub service -- confirm against the
        # azurerm provider schema.
        return pulumi.get(self, "external_ip")

    @property
    @pulumi.getter
    def hostname(self) -> pulumi.Output[str]:
        """
        The FQDN of the Web Pubsub service.
        """
        return pulumi.get(self, "hostname")

    @property
    @pulumi.getter
    def identity(self) -> pulumi.Output[Optional['outputs.ServiceIdentity']]:
        """
        An `identity` block as defined below.
        """
        return pulumi.get(self, "identity")

    @property
    @pulumi.getter(name="liveTrace")
    def live_trace(self) -> pulumi.Output[Optional['outputs.ServiceLiveTrace']]:
        """
        A `live_trace` block as defined below.
        """
        return pulumi.get(self, "live_trace")

    @property
    @pulumi.getter(name="localAuthEnabled")
    def local_auth_enabled(self) -> pulumi.Output[Optional[bool]]:
        """
        Whether to enable local auth? Defaults to `true`.
        """
        return pulumi.get(self, "local_auth_enabled")

    @property
    @pulumi.getter
    def location(self) -> pulumi.Output[str]:
        """
        Specifies the supported Azure location where the Web Pubsub service exists. Changing this
        forces a new resource to be created.
        """
        return pulumi.get(self, "location")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The name of the Web Pubsub service. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="primaryAccessKey")
    def primary_access_key(self) -> pulumi.Output[str]:
        """
        The primary access key for the Web Pubsub service.
        """
        return pulumi.get(self, "primary_access_key")

    @property
    @pulumi.getter(name="primaryConnectionString")
    def primary_connection_string(self) -> pulumi.Output[str]:
        """
        The primary connection string for the Web Pubsub service.
        """
        return pulumi.get(self, "primary_connection_string")

    @property
    @pulumi.getter(name="publicNetworkAccessEnabled")
    def public_network_access_enabled(self) -> pulumi.Output[Optional[bool]]:
        """
        Whether to enable public network access? Defaults to `true`.
        """
        return pulumi.get(self, "public_network_access_enabled")

    @property
    @pulumi.getter(name="publicPort")
    def public_port(self) -> pulumi.Output[int]:
        """
        The publicly accessible port of the Web Pubsub service which is designed for browser/client use.
        """
        return pulumi.get(self, "public_port")

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Output[str]:
        """
        The name of the resource group in which to create the Web Pubsub service. Changing
        this forces a new resource to be created.
        """
        return pulumi.get(self, "resource_group_name")

    @property
    @pulumi.getter(name="secondaryAccessKey")
    def secondary_access_key(self) -> pulumi.Output[str]:
        """
        The secondary access key for the Web Pubsub service.
        """
        return pulumi.get(self, "secondary_access_key")

    @property
    @pulumi.getter(name="secondaryConnectionString")
    def secondary_connection_string(self) -> pulumi.Output[str]:
        """
        The secondary connection string for the Web Pubsub service.
        """
        return pulumi.get(self, "secondary_connection_string")

    @property
    @pulumi.getter(name="serverPort")
    def server_port(self) -> pulumi.Output[int]:
        """
        The publicly accessible port of the Web Pubsub service which is designed for customer server side use.
        """
        return pulumi.get(self, "server_port")

    @property
    @pulumi.getter
    def sku(self) -> pulumi.Output[str]:
        """
        Specifies which sku to use. Possible values are `Free_F1` and `Standard_S1`.
        """
        return pulumi.get(self, "sku")

    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """
        A mapping of tags to assign to the resource.
        """
        return pulumi.get(self, "tags")

    @property
    @pulumi.getter(name="tlsClientCertEnabled")
    def tls_client_cert_enabled(self) -> pulumi.Output[Optional[bool]]:
        """
        Whether to request client certificate during TLS handshake? Defaults
        to `false`.
        """
        return pulumi.get(self, "tls_client_cert_enabled")

    @property
    @pulumi.getter
    def version(self) -> pulumi.Output[str]:
        # NOTE(review): undocumented upstream; presumably the service version
        # reported by the provider -- confirm before documenting further.
        return pulumi.get(self, "version")
| 45.073367
| 191
| 0.65363
| 5,308
| 44,848
| 5.284665
| 0.047664
| 0.087448
| 0.093473
| 0.045489
| 0.924887
| 0.902535
| 0.881609
| 0.859007
| 0.843214
| 0.820006
| 0
| 0.005238
| 0.242285
| 44,848
| 994
| 192
| 45.118712
| 0.820228
| 0.31667
| 0
| 0.727758
| 1
| 0
| 0.120525
| 0.03736
| 0
| 0
| 0
| 0
| 0
| 1
| 0.16726
| false
| 0.001779
| 0.012456
| 0.007117
| 0.282918
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
988183513b6a6bfef0a5fa57f48b03f2c9e871b0
| 4,037
|
py
|
Python
|
mayi/test_case/test_landlord_delete.py
|
18701016443/mayi
|
192c70c49a8e9e072b9d0d0136f02c653c589410
|
[
"MIT"
] | null | null | null |
mayi/test_case/test_landlord_delete.py
|
18701016443/mayi
|
192c70c49a8e9e072b9d0d0136f02c653c589410
|
[
"MIT"
] | null | null | null |
mayi/test_case/test_landlord_delete.py
|
18701016443/mayi
|
192c70c49a8e9e072b9d0d0136f02c653c589410
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# encoding: utf-8
"""
@version: python.3.6
@author: zhangjiaheng
@software: PyCharm
@time: 2017/9/22 10:08
"""
from test_case.page_obj import login_page,landlord_nav_page,landlord_delete_page
from models import myunit,function
from time import sleep
import unittest
class TestDelete(myunit.MyTest):
    """Landlord listing ("房源") deletion tests.

    Each test drives the UI to the landlord room-management page, deletes a
    listing in a particular state and verifies the success toast.

    NOTE(review): only ``test_checkingroom_delete`` logs in and dismisses the
    WeChat popup; the other tests reuse the browser session it leaves behind.
    unittest executes methods in alphabetical order, which happens to run that
    test first -- the suite is therefore order-dependent; confirm this
    assumption before renaming any test method.
    """

    def _open_room_manager(self):
        """Navigate from the landlord entry point to the room-management page."""
        landlord_nav_page.LandlordNavPage(self.driver).Iamlandlord()
        sleep(2)
        landlord_nav_page.LandlordNavPage(self.driver).roommanager()

    def _confirm_delete(self, po):
        """Confirm the pending delete dialog, assert the success toast,
        capture a screenshot and close the confirmation.

        :param po: LandlordDeletePage on which a delete was just triggered.
        """
        sleep(1)
        po.delBtn()
        sleep(2)
        print(po.delsuccess_text())
        # Toast text is produced by the app in Chinese; compare byte-for-byte.
        assert po.delsuccess_text() == "删除成功!"
        function.insert_img(self.driver, "delete.png")
        po.okDelSuccessBtn()

    def test_checkingroom_delete(self):
        """① Under review ("审核中") -- delete a listing."""
        # First test in alphabetical order: performs the login and closes the
        # WeChat popup for the whole suite.
        login_page.LoginPage(self.driver).login()
        sleep(2)
        landlord_nav_page.LandlordNavPage(self.driver).Iamlandlord()
        sleep(2)
        landlord_nav_page.LandlordNavPage(self.driver).close_weiChat()
        sleep(2)
        landlord_nav_page.LandlordNavPage(self.driver).roommanager()
        sleep(2)
        po = landlord_delete_page.LandlordDeletePage(self.driver)
        po.roomfilter2()
        sleep(2)
        po.checking_delete()
        self._confirm_delete(po)

    def test_room_delete(self):
        """Online ("已上线") -- delete a listing."""
        self._open_room_manager()
        po = landlord_delete_page.LandlordDeletePage(self.driver)
        po.room_delete()
        self._confirm_delete(po)

    def test_offlineroom_delete(self):
        """Offline ("已下线") -- delete a listing."""
        self._open_room_manager()
        po = landlord_delete_page.LandlordDeletePage(self.driver)
        po.roomfilter5()
        sleep(2)
        po.room_delete()
        self._confirm_delete(po)

    def test_nopassroom_delete(self):
        """Rejected ("未通过") -- delete a listing."""
        self._open_room_manager()
        po = landlord_delete_page.LandlordDeletePage(self.driver)
        po.roomfilter3()
        sleep(2)
        po.checking_delete()
        self._confirm_delete(po)

    def test_need_complete_delete(self):
        """Incomplete ("待完善") -- delete a listing."""
        self._open_room_manager()
        po = landlord_delete_page.LandlordDeletePage(self.driver)
        po.roomfilter1()
        sleep(2)
        po.need_complete_delete()
        self._confirm_delete(po)
if __name__ == "__main__":
    # Allow running this suite directly: ``python test_landlord_delete.py``.
    unittest.main()
| 31.053846
| 80
| 0.634877
| 456
| 4,037
| 5.429825
| 0.182018
| 0.121163
| 0.096931
| 0.102989
| 0.809774
| 0.809774
| 0.809774
| 0.809774
| 0.78958
| 0.78958
| 0
| 0.015961
| 0.239534
| 4,037
| 130
| 81
| 31.053846
| 0.787296
| 0.165717
| 0
| 0.776471
| 0
| 0
| 0.025045
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 1
| 0.058824
| false
| 0.011765
| 0.047059
| 0
| 0.117647
| 0.058824
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7f4792541f1bbe8dbaf7be668ee5ca0dd348f39f
| 99,945
|
py
|
Python
|
test/azure/Expected/AcceptanceTests/Lro/lro/aio/operations_async/_lrosa_ds_operations_async.py
|
fearthecowboy/autorest.python
|
a251e361218598b55b0621db2275aafcb7158a5c
|
[
"MIT"
] | null | null | null |
test/azure/Expected/AcceptanceTests/Lro/lro/aio/operations_async/_lrosa_ds_operations_async.py
|
fearthecowboy/autorest.python
|
a251e361218598b55b0621db2275aafcb7158a5c
|
[
"MIT"
] | null | null | null |
test/azure/Expected/AcceptanceTests/Lro/lro/aio/operations_async/_lrosa_ds_operations_async.py
|
fearthecowboy/autorest.python
|
a251e361218598b55b0621db2275aafcb7158a5c
|
[
"MIT"
] | null | null | null |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from msrest.polling.async_poller import async_poller, AsyncNoPolling
from msrestazure.polling.async_arm_polling import AsyncARMPolling
from ... import models
class LROSADsOperations:
    """LROSADsOperations async operations.

    You should not instantiate this class directly; instead, create a client
    instance that will create it for you and attach it as an attribute.

    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    # Expose the generated models module on the class for convenient access.
    models = models
    def __init__(self, client, config, serializer, deserializer) -> None:
        # Store the injected pipeline client, configuration and the
        # (de)serialization helpers used by every operation below.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self.config = config
    async def _put_non_retry400_initial(
            self, product=None, *, custom_headers=None, raw=False, **operation_config):
        """Send the initial PUT for :meth:`put_non_retry400`.

        Returns the deserialized Product (or a ClientRawResponse when
        ``raw=True``); raises CloudError for any status other than 200/201.
        """
        # Construct URL
        url = self.put_non_retry400.metadata['url']

        # Construct parameters
        query_parameters = {}

        # Construct headers
        header_parameters = {}
        header_parameters['Accept'] = 'application/json'
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct body
        if product is not None:
            body_content = self._serialize.body(product, 'Product')
        else:
            body_content = None

        # Construct and send request
        request = self._client.put(url, query_parameters, header_parameters, body_content)
        response = await self._client.async_send(request, stream=False, **operation_config)

        if response.status_code not in [200, 201]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('Product', response)
        if response.status_code == 201:
            deserialized = self._deserialize('Product', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized
    async def put_non_retry400(
            self, product=None, *, custom_headers=None, raw=False, polling=True, **operation_config):
        """Long running put request, service returns a 400 to the initial request.

        :param product: Product to put
        :type product: ~lro.models.Product
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: The poller return type is ClientRawResponse, the
         direct response alongside the deserialized response
        :param polling: True for AsyncARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :return: An instance of Product or ClientRawResponse<Product> if
         raw==True
        :rtype: ~lro.models.Product or
         ~msrest.pipeline.ClientRawResponse[~lro.models.Product]
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Always fetch the raw initial response; the poller needs it to track
        # the long-running operation.
        raw_result = await self._put_non_retry400_initial(
            product=product,
            custom_headers=custom_headers,
            raw=True,
            **operation_config
        )

        def get_long_running_output(response):
            # Invoked by the poller with the final response once the LRO ends.
            deserialized = self._deserialize('Product', response)

            if raw:
                client_raw_response = ClientRawResponse(deserialized, response)
                return client_raw_response

            return deserialized

        lro_delay = operation_config.get(
            'long_running_operation_timeout',
            self.config.long_running_operation_timeout)
        if polling is True: polling_method = AsyncARMPolling(lro_delay, **operation_config)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        return await async_poller(self._client, raw_result, get_long_running_output, polling_method)
    put_non_retry400.metadata = {'url': '/lro/nonretryerror/put/400'}
    async def _put_non_retry201_creating400_initial(
            self, product=None, *, custom_headers=None, raw=False, **operation_config):
        """Send the initial PUT for :meth:`put_non_retry201_creating400`.

        Returns the deserialized Product (or a ClientRawResponse when
        ``raw=True``); raises CloudError for any status other than 200/201.
        """
        # Construct URL
        url = self.put_non_retry201_creating400.metadata['url']

        # Construct parameters
        query_parameters = {}

        # Construct headers
        header_parameters = {}
        header_parameters['Accept'] = 'application/json'
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct body
        if product is not None:
            body_content = self._serialize.body(product, 'Product')
        else:
            body_content = None

        # Construct and send request
        request = self._client.put(url, query_parameters, header_parameters, body_content)
        response = await self._client.async_send(request, stream=False, **operation_config)

        if response.status_code not in [200, 201]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('Product', response)
        if response.status_code == 201:
            deserialized = self._deserialize('Product', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized
    async def put_non_retry201_creating400(
            self, product=None, *, custom_headers=None, raw=False, polling=True, **operation_config):
        """Long running put request, service returns a Product with
        'ProvisioningState' = 'Creating' and 201 response code.

        :param product: Product to put
        :type product: ~lro.models.Product
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: The poller return type is ClientRawResponse, the
         direct response alongside the deserialized response
        :param polling: True for AsyncARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :return: An instance of Product or ClientRawResponse<Product> if
         raw==True
        :rtype: ~lro.models.Product or
         ~msrest.pipeline.ClientRawResponse[~lro.models.Product]
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Always fetch the raw initial response; the poller needs it to track
        # the long-running operation.
        raw_result = await self._put_non_retry201_creating400_initial(
            product=product,
            custom_headers=custom_headers,
            raw=True,
            **operation_config
        )

        def get_long_running_output(response):
            # Invoked by the poller with the final response once the LRO ends.
            deserialized = self._deserialize('Product', response)

            if raw:
                client_raw_response = ClientRawResponse(deserialized, response)
                return client_raw_response

            return deserialized

        lro_delay = operation_config.get(
            'long_running_operation_timeout',
            self.config.long_running_operation_timeout)
        if polling is True: polling_method = AsyncARMPolling(lro_delay, **operation_config)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        return await async_poller(self._client, raw_result, get_long_running_output, polling_method)
    put_non_retry201_creating400.metadata = {'url': '/lro/nonretryerror/put/201/creating/400'}
    async def _put_non_retry201_creating400_invalid_json_initial(
            self, product=None, *, custom_headers=None, raw=False, **operation_config):
        """Send the initial PUT for :meth:`put_non_retry201_creating400_invalid_json`.

        Returns the deserialized Product (or a ClientRawResponse when
        ``raw=True``); raises CloudError for any status other than 200/201.
        """
        # Construct URL
        url = self.put_non_retry201_creating400_invalid_json.metadata['url']

        # Construct parameters
        query_parameters = {}

        # Construct headers
        header_parameters = {}
        header_parameters['Accept'] = 'application/json'
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct body
        if product is not None:
            body_content = self._serialize.body(product, 'Product')
        else:
            body_content = None

        # Construct and send request
        request = self._client.put(url, query_parameters, header_parameters, body_content)
        response = await self._client.async_send(request, stream=False, **operation_config)

        if response.status_code not in [200, 201]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('Product', response)
        if response.status_code == 201:
            deserialized = self._deserialize('Product', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized
async def put_non_retry201_creating400_invalid_json(
        self, product=None, *, custom_headers=None, raw=False, polling=True, **operation_config):
    """Long running put request, service returns a Product with
    'ProvisioningState' = 'Creating' and 201 response code.

    :param product: Product to put
    :type product: ~lro.models.Product
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for AsyncARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of Product or ClientRawResponse<Product> if
     raw==True
    :rtype: ~lro.models.Product or
     ~msrest.pipeline.ClientRawResponse[~lro.models.Product]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Kick off the operation; the poller needs the raw initial response.
    raw_result = await self._put_non_retry201_creating400_invalid_json_initial(
        product=product, custom_headers=custom_headers, raw=True, **operation_config)

    def get_long_running_output(response):
        # Deserialize the terminal response into the client-facing type.
        deserialized = self._deserialize('Product', response)
        if raw:
            return ClientRawResponse(deserialized, response)
        return deserialized

    lro_delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    # Choose the polling strategy: default ARM polling, disabled, or custom.
    if polling is True:
        polling_method = AsyncARMPolling(lro_delay, **operation_config)
    elif polling is False:
        polling_method = AsyncNoPolling()
    else:
        polling_method = polling
    return await async_poller(self._client, raw_result, get_long_running_output, polling_method)
put_non_retry201_creating400_invalid_json.metadata = {'url': '/lro/nonretryerror/put/201/creating/400/invalidjson'}
async def _put_async_relative_retry400_initial(
        self, product=None, *, custom_headers=None, raw=False, **operation_config):
    """Send the initial PUT for the long-running operation and return the
    immediate response (deserialized Product or raw wrapper)."""
    url = self.put_async_relative_retry400.metadata['url']

    query_parameters = {}

    # Request headers.
    header_parameters = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Optional request body.
    body_content = None if product is None else self._serialize.body(product, 'Product')

    # Build and dispatch the request.
    request = self._client.put(url, query_parameters, header_parameters, body_content)
    response = await self._client.async_send(request, stream=False, **operation_config)

    if response.status_code not in [200]:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    deserialized = None
    header_dict = {}
    if response.status_code == 200:
        deserialized = self._deserialize('Product', response)
        # Typed polling headers surfaced on the raw response.
        header_dict = {
            'Azure-AsyncOperation': 'str',
            'Location': 'str',
            'Retry-After': 'int',
        }

    if raw:
        raw_response = ClientRawResponse(deserialized, response)
        raw_response.add_headers(header_dict)
        return raw_response
    return deserialized
async def put_async_relative_retry400(
        self, product=None, *, custom_headers=None, raw=False, polling=True, **operation_config):
    """Long running put request, service returns a 200 with
    ProvisioningState='Creating'. Poll the endpoint indicated in the
    Azure-AsyncOperation header for operation status.

    :param product: Product to put
    :type product: ~lro.models.Product
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for AsyncARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of Product or ClientRawResponse<Product> if
     raw==True
    :rtype: ~lro.models.Product or
     ~msrest.pipeline.ClientRawResponse[~lro.models.Product]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Kick off the operation; the poller needs the raw initial response.
    raw_result = await self._put_async_relative_retry400_initial(
        product=product, custom_headers=custom_headers, raw=True, **operation_config)

    def get_long_running_output(response):
        # Typed polling headers surfaced on the raw response.
        header_dict = {
            'Azure-AsyncOperation': 'str',
            'Location': 'str',
            'Retry-After': 'int',
        }
        deserialized = self._deserialize('Product', response)
        if raw:
            raw_response = ClientRawResponse(deserialized, response)
            raw_response.add_headers(header_dict)
            return raw_response
        return deserialized

    lro_delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    # Choose the polling strategy: default ARM polling, disabled, or custom.
    if polling is True:
        polling_method = AsyncARMPolling(lro_delay, **operation_config)
    elif polling is False:
        polling_method = AsyncNoPolling()
    else:
        polling_method = polling
    return await async_poller(self._client, raw_result, get_long_running_output, polling_method)
put_async_relative_retry400.metadata = {'url': '/lro/nonretryerror/putasync/retry/400'}
async def _delete_non_retry400_initial(
        self, *, custom_headers=None, raw=False, **operation_config):
    """Send the initial DELETE for the long-running operation."""
    url = self.delete_non_retry400.metadata['url']

    query_parameters = {}

    # Request headers.
    header_parameters = {}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Build and dispatch the request.
    request = self._client.delete(url, query_parameters, header_parameters)
    response = await self._client.async_send(request, stream=False, **operation_config)

    if response.status_code not in [202]:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    if raw:
        raw_response = ClientRawResponse(None, response)
        # Typed polling headers surfaced on the raw response.
        raw_response.add_headers({
            'Location': 'str',
            'Retry-After': 'int',
        })
        return raw_response
async def delete_non_retry400(
        self, *, custom_headers=None, raw=False, polling=True, **operation_config):
    """Long running delete request, service returns a 400 with an error body.

    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for AsyncARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of None or ClientRawResponse<None> if raw==True
    :rtype: None or ~msrest.pipeline.ClientRawResponse[None]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Kick off the operation; the poller needs the raw initial response.
    raw_result = await self._delete_non_retry400_initial(
        custom_headers=custom_headers, raw=True, **operation_config)

    def get_long_running_output(response):
        if raw:
            raw_response = ClientRawResponse(None, response)
            raw_response.add_headers({
                'Location': 'str',
                'Retry-After': 'int',
            })
            return raw_response

    lro_delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    # Choose the polling strategy: default ARM polling, disabled, or custom.
    if polling is True:
        polling_method = AsyncARMPolling(lro_delay, **operation_config)
    elif polling is False:
        polling_method = AsyncNoPolling()
    else:
        polling_method = polling
    return await async_poller(self._client, raw_result, get_long_running_output, polling_method)
delete_non_retry400.metadata = {'url': '/lro/nonretryerror/delete/400'}
async def _delete202_non_retry400_initial(
        self, *, custom_headers=None, raw=False, **operation_config):
    """Send the initial DELETE for the long-running operation."""
    url = self.delete202_non_retry400.metadata['url']

    query_parameters = {}

    # Request headers.
    header_parameters = {}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Build and dispatch the request.
    request = self._client.delete(url, query_parameters, header_parameters)
    response = await self._client.async_send(request, stream=False, **operation_config)

    if response.status_code not in [202]:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    if raw:
        raw_response = ClientRawResponse(None, response)
        # Typed polling headers surfaced on the raw response.
        raw_response.add_headers({
            'Location': 'str',
            'Retry-After': 'int',
        })
        return raw_response
async def delete202_non_retry400(
        self, *, custom_headers=None, raw=False, polling=True, **operation_config):
    """Long running delete request, service returns a 202 with a location
    header.

    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for AsyncARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of None or ClientRawResponse<None> if raw==True
    :rtype: None or ~msrest.pipeline.ClientRawResponse[None]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Kick off the operation; the poller needs the raw initial response.
    raw_result = await self._delete202_non_retry400_initial(
        custom_headers=custom_headers, raw=True, **operation_config)

    def get_long_running_output(response):
        if raw:
            raw_response = ClientRawResponse(None, response)
            raw_response.add_headers({
                'Location': 'str',
                'Retry-After': 'int',
            })
            return raw_response

    lro_delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    # Choose the polling strategy: default ARM polling, disabled, or custom.
    if polling is True:
        polling_method = AsyncARMPolling(lro_delay, **operation_config)
    elif polling is False:
        polling_method = AsyncNoPolling()
    else:
        polling_method = polling
    return await async_poller(self._client, raw_result, get_long_running_output, polling_method)
delete202_non_retry400.metadata = {'url': '/lro/nonretryerror/delete/202/retry/400'}
async def _delete_async_relative_retry400_initial(
        self, *, custom_headers=None, raw=False, **operation_config):
    """Send the initial DELETE for the long-running operation."""
    url = self.delete_async_relative_retry400.metadata['url']

    query_parameters = {}

    # Request headers.
    header_parameters = {}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Build and dispatch the request.
    request = self._client.delete(url, query_parameters, header_parameters)
    response = await self._client.async_send(request, stream=False, **operation_config)

    if response.status_code not in [202]:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    if raw:
        raw_response = ClientRawResponse(None, response)
        # Typed polling headers surfaced on the raw response.
        raw_response.add_headers({
            'Azure-AsyncOperation': 'str',
            'Location': 'str',
            'Retry-After': 'int',
        })
        return raw_response
async def delete_async_relative_retry400(
        self, *, custom_headers=None, raw=False, polling=True, **operation_config):
    """Long running delete request, service returns a 202 to the initial
    request. Poll the endpoint indicated in the Azure-AsyncOperation header
    for operation status.

    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for AsyncARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of None or ClientRawResponse<None> if raw==True
    :rtype: None or ~msrest.pipeline.ClientRawResponse[None]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Kick off the operation; the poller needs the raw initial response.
    raw_result = await self._delete_async_relative_retry400_initial(
        custom_headers=custom_headers, raw=True, **operation_config)

    def get_long_running_output(response):
        if raw:
            raw_response = ClientRawResponse(None, response)
            raw_response.add_headers({
                'Azure-AsyncOperation': 'str',
                'Location': 'str',
                'Retry-After': 'int',
            })
            return raw_response

    lro_delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    # Choose the polling strategy: default ARM polling, disabled, or custom.
    if polling is True:
        polling_method = AsyncARMPolling(lro_delay, **operation_config)
    elif polling is False:
        polling_method = AsyncNoPolling()
    else:
        polling_method = polling
    return await async_poller(self._client, raw_result, get_long_running_output, polling_method)
delete_async_relative_retry400.metadata = {'url': '/lro/nonretryerror/deleteasync/retry/400'}
async def _post_non_retry400_initial(
        self, product=None, *, custom_headers=None, raw=False, **operation_config):
    """Send the initial POST for the long-running operation."""
    url = self.post_non_retry400.metadata['url']

    query_parameters = {}

    # Request headers.
    header_parameters = {
        'Content-Type': 'application/json; charset=utf-8',
    }
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Optional request body.
    body_content = None if product is None else self._serialize.body(product, 'Product')

    # Build and dispatch the request.
    request = self._client.post(url, query_parameters, header_parameters, body_content)
    response = await self._client.async_send(request, stream=False, **operation_config)

    if response.status_code not in [202]:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    if raw:
        raw_response = ClientRawResponse(None, response)
        # Typed polling headers surfaced on the raw response.
        raw_response.add_headers({
            'Location': 'str',
            'Retry-After': 'int',
        })
        return raw_response
async def post_non_retry400(
        self, product=None, *, custom_headers=None, raw=False, polling=True, **operation_config):
    """Long running post request, service returns a 400 with no error body.

    :param product: Product to put
    :type product: ~lro.models.Product
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for AsyncARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of None or ClientRawResponse<None> if raw==True
    :rtype: None or ~msrest.pipeline.ClientRawResponse[None]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Kick off the operation; the poller needs the raw initial response.
    raw_result = await self._post_non_retry400_initial(
        product=product, custom_headers=custom_headers, raw=True, **operation_config)

    def get_long_running_output(response):
        if raw:
            raw_response = ClientRawResponse(None, response)
            raw_response.add_headers({
                'Location': 'str',
                'Retry-After': 'int',
            })
            return raw_response

    lro_delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    # Choose the polling strategy: default ARM polling, disabled, or custom.
    if polling is True:
        polling_method = AsyncARMPolling(lro_delay, **operation_config)
    elif polling is False:
        polling_method = AsyncNoPolling()
    else:
        polling_method = polling
    return await async_poller(self._client, raw_result, get_long_running_output, polling_method)
post_non_retry400.metadata = {'url': '/lro/nonretryerror/post/400'}
async def _post202_non_retry400_initial(
        self, product=None, *, custom_headers=None, raw=False, **operation_config):
    """Send the initial POST for the long-running operation."""
    url = self.post202_non_retry400.metadata['url']

    query_parameters = {}

    # Request headers.
    header_parameters = {
        'Content-Type': 'application/json; charset=utf-8',
    }
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Optional request body.
    body_content = None if product is None else self._serialize.body(product, 'Product')

    # Build and dispatch the request.
    request = self._client.post(url, query_parameters, header_parameters, body_content)
    response = await self._client.async_send(request, stream=False, **operation_config)

    if response.status_code not in [202]:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    if raw:
        raw_response = ClientRawResponse(None, response)
        # Typed polling headers surfaced on the raw response.
        raw_response.add_headers({
            'Location': 'str',
            'Retry-After': 'int',
        })
        return raw_response
async def post202_non_retry400(
        self, product=None, *, custom_headers=None, raw=False, polling=True, **operation_config):
    """Long running post request, service returns a 202 with a location
    header.

    :param product: Product to put
    :type product: ~lro.models.Product
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for AsyncARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of None or ClientRawResponse<None> if raw==True
    :rtype: None or ~msrest.pipeline.ClientRawResponse[None]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Kick off the operation; the poller needs the raw initial response.
    raw_result = await self._post202_non_retry400_initial(
        product=product, custom_headers=custom_headers, raw=True, **operation_config)

    def get_long_running_output(response):
        if raw:
            raw_response = ClientRawResponse(None, response)
            raw_response.add_headers({
                'Location': 'str',
                'Retry-After': 'int',
            })
            return raw_response

    lro_delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    # Choose the polling strategy: default ARM polling, disabled, or custom.
    if polling is True:
        polling_method = AsyncARMPolling(lro_delay, **operation_config)
    elif polling is False:
        polling_method = AsyncNoPolling()
    else:
        polling_method = polling
    return await async_poller(self._client, raw_result, get_long_running_output, polling_method)
post202_non_retry400.metadata = {'url': '/lro/nonretryerror/post/202/retry/400'}
async def _post_async_relative_retry400_initial(
        self, product=None, *, custom_headers=None, raw=False, **operation_config):
    """Send the initial POST for the long-running operation."""
    url = self.post_async_relative_retry400.metadata['url']

    query_parameters = {}

    # Request headers.
    header_parameters = {
        'Content-Type': 'application/json; charset=utf-8',
    }
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Optional request body.
    body_content = None if product is None else self._serialize.body(product, 'Product')

    # Build and dispatch the request.
    request = self._client.post(url, query_parameters, header_parameters, body_content)
    response = await self._client.async_send(request, stream=False, **operation_config)

    if response.status_code not in [202]:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    if raw:
        raw_response = ClientRawResponse(None, response)
        # Typed polling headers surfaced on the raw response.
        raw_response.add_headers({
            'Azure-AsyncOperation': 'str',
            'Location': 'str',
            'Retry-After': 'int',
        })
        return raw_response
async def post_async_relative_retry400(
        self, product=None, *, custom_headers=None, raw=False, polling=True, **operation_config):
    """Long running post request, service returns a 202 to the initial request
    Poll the endpoint indicated in the Azure-AsyncOperation header for
    operation status.

    :param product: Product to put
    :type product: ~lro.models.Product
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for AsyncARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of None or ClientRawResponse<None> if raw==True
    :rtype: None or ~msrest.pipeline.ClientRawResponse[None]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Kick off the operation; the poller needs the raw initial response.
    raw_result = await self._post_async_relative_retry400_initial(
        product=product, custom_headers=custom_headers, raw=True, **operation_config)

    def get_long_running_output(response):
        if raw:
            raw_response = ClientRawResponse(None, response)
            raw_response.add_headers({
                'Azure-AsyncOperation': 'str',
                'Location': 'str',
                'Retry-After': 'int',
            })
            return raw_response

    lro_delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    # Choose the polling strategy: default ARM polling, disabled, or custom.
    if polling is True:
        polling_method = AsyncARMPolling(lro_delay, **operation_config)
    elif polling is False:
        polling_method = AsyncNoPolling()
    else:
        polling_method = polling
    return await async_poller(self._client, raw_result, get_long_running_output, polling_method)
post_async_relative_retry400.metadata = {'url': '/lro/nonretryerror/postasync/retry/400'}
async def _put_error201_no_provisioning_state_payload_initial(
        self, product=None, *, custom_headers=None, raw=False, **operation_config):
    """Send the initial PUT for the long-running operation and return the
    immediate response (deserialized Product or raw wrapper)."""
    url = self.put_error201_no_provisioning_state_payload.metadata['url']

    query_parameters = {}

    # Request headers.
    header_parameters = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Optional request body.
    body_content = None if product is None else self._serialize.body(product, 'Product')

    # Build and dispatch the request.
    request = self._client.put(url, query_parameters, header_parameters, body_content)
    response = await self._client.async_send(request, stream=False, **operation_config)

    if response.status_code not in [200, 201]:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    # Both 200 and 201 carry a Product payload.
    deserialized = None
    if response.status_code in (200, 201):
        deserialized = self._deserialize('Product', response)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
async def put_error201_no_provisioning_state_payload(
        self, product=None, *, custom_headers=None, raw=False, polling=True, **operation_config):
    """Long running put request, service returns a 201 to the initial request
    with no payload.

    :param product: Product to put
    :type product: ~lro.models.Product
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for AsyncARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of Product or ClientRawResponse<Product> if
     raw==True
    :rtype: ~lro.models.Product or
     ~msrest.pipeline.ClientRawResponse[~lro.models.Product]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Kick off the operation; the poller needs the raw initial response.
    raw_result = await self._put_error201_no_provisioning_state_payload_initial(
        product=product, custom_headers=custom_headers, raw=True, **operation_config)

    def get_long_running_output(response):
        # Deserialize the terminal response into the client-facing type.
        deserialized = self._deserialize('Product', response)
        if raw:
            return ClientRawResponse(deserialized, response)
        return deserialized

    lro_delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    # Choose the polling strategy: default ARM polling, disabled, or custom.
    if polling is True:
        polling_method = AsyncARMPolling(lro_delay, **operation_config)
    elif polling is False:
        polling_method = AsyncNoPolling()
    else:
        polling_method = polling
    return await async_poller(self._client, raw_result, get_long_running_output, polling_method)
put_error201_no_provisioning_state_payload.metadata = {'url': '/lro/error/put/201/noprovisioningstatepayload'}
async def _put_async_relative_retry_no_status_initial(
        self, product=None, *, custom_headers=None, raw=False, **operation_config):
    """Send the initial PUT for the long-running operation and return the
    immediate response (deserialized Product or raw wrapper)."""
    url = self.put_async_relative_retry_no_status.metadata['url']

    query_parameters = {}

    # Request headers.
    header_parameters = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Optional request body.
    body_content = None if product is None else self._serialize.body(product, 'Product')

    # Build and dispatch the request.
    request = self._client.put(url, query_parameters, header_parameters, body_content)
    response = await self._client.async_send(request, stream=False, **operation_config)

    if response.status_code not in [200]:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    deserialized = None
    header_dict = {}
    if response.status_code == 200:
        deserialized = self._deserialize('Product', response)
        # Typed polling headers surfaced on the raw response.
        header_dict = {
            'Azure-AsyncOperation': 'str',
            'Location': 'str',
            'Retry-After': 'int',
        }

    if raw:
        raw_response = ClientRawResponse(deserialized, response)
        raw_response.add_headers(header_dict)
        return raw_response
    return deserialized
async def put_async_relative_retry_no_status(
        self, product=None, *, custom_headers=None, raw=False, polling=True, **operation_config):
    """Long running put request, service returns a 200 to the initial request,
    with an entity that contains ProvisioningState='Creating'. Poll the
    endpoint indicated in the Azure-AsyncOperation header for operation
    status.

    :param product: Product to put
    :type product: ~lro.models.Product
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for AsyncARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of Product or ClientRawResponse<Product> if
     raw==True
    :rtype: ~lro.models.Product or
     ~msrest.pipeline.ClientRawResponse[~lro.models.Product]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Kick off the operation; the poller needs the raw initial response.
    raw_result = await self._put_async_relative_retry_no_status_initial(
        product=product, custom_headers=custom_headers, raw=True, **operation_config)

    def get_long_running_output(response):
        # Typed polling headers surfaced on the raw response.
        header_dict = {
            'Azure-AsyncOperation': 'str',
            'Location': 'str',
            'Retry-After': 'int',
        }
        deserialized = self._deserialize('Product', response)
        if raw:
            raw_response = ClientRawResponse(deserialized, response)
            raw_response.add_headers(header_dict)
            return raw_response
        return deserialized

    lro_delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    # Choose the polling strategy: default ARM polling, disabled, or custom.
    if polling is True:
        polling_method = AsyncARMPolling(lro_delay, **operation_config)
    elif polling is False:
        polling_method = AsyncNoPolling()
    else:
        polling_method = polling
    return await async_poller(self._client, raw_result, get_long_running_output, polling_method)
put_async_relative_retry_no_status.metadata = {'url': '/lro/error/putasync/retry/nostatus'}
async def _put_async_relative_retry_no_status_payload_initial(
        self, product=None, *, custom_headers=None, raw=False, **operation_config):
    """Send the initial PUT for the long-running operation and return the
    immediate response (deserialized Product or raw wrapper)."""
    url = self.put_async_relative_retry_no_status_payload.metadata['url']

    query_parameters = {}

    # Request headers.
    header_parameters = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Optional request body.
    body_content = None if product is None else self._serialize.body(product, 'Product')

    # Build and dispatch the request.
    request = self._client.put(url, query_parameters, header_parameters, body_content)
    response = await self._client.async_send(request, stream=False, **operation_config)

    if response.status_code not in [200]:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    deserialized = None
    header_dict = {}
    if response.status_code == 200:
        deserialized = self._deserialize('Product', response)
        # Typed polling headers surfaced on the raw response.
        header_dict = {
            'Azure-AsyncOperation': 'str',
            'Location': 'str',
            'Retry-After': 'int',
        }

    if raw:
        raw_response = ClientRawResponse(deserialized, response)
        raw_response.add_headers(header_dict)
        return raw_response
    return deserialized
async def put_async_relative_retry_no_status_payload(
        self, product=None, *, custom_headers=None, raw=False, polling=True, **operation_config):
    """Long running put request, service returns a 200 to the initial request,
    with an entity that contains ProvisioningState='Creating'. Poll the
    endpoint indicated in the Azure-AsyncOperation header for operation
    status.

    :param product: Product to put
    :type product: ~lro.models.Product
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for AsyncARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of Product or ClientRawResponse<Product> if
     raw==True
    :rtype: ~lro.models.Product or
     ~msrest.pipeline.ClientRawResponse[~lro.models.Product]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Kick off the operation; the poller needs the raw initial response.
    raw_result = await self._put_async_relative_retry_no_status_payload_initial(
        product=product, custom_headers=custom_headers, raw=True, **operation_config)

    def get_long_running_output(response):
        # Typed polling headers surfaced on the raw response.
        header_dict = {
            'Azure-AsyncOperation': 'str',
            'Location': 'str',
            'Retry-After': 'int',
        }
        deserialized = self._deserialize('Product', response)
        if raw:
            raw_response = ClientRawResponse(deserialized, response)
            raw_response.add_headers(header_dict)
            return raw_response
        return deserialized

    lro_delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    # Choose the polling strategy: default ARM polling, disabled, or custom.
    if polling is True:
        polling_method = AsyncARMPolling(lro_delay, **operation_config)
    elif polling is False:
        polling_method = AsyncNoPolling()
    else:
        polling_method = polling
    return await async_poller(self._client, raw_result, get_long_running_output, polling_method)
put_async_relative_retry_no_status_payload.metadata = {'url': '/lro/error/putasync/retry/nostatuspayload'}
async def _delete204_succeeded_initial(
        self, *, custom_headers=None, raw=False, **operation_config):
    """Send the initial DELETE for delete204_succeeded; expects HTTP 204."""
    url = self.delete204_succeeded.metadata['url']
    query_parameters = {}

    # Assemble request headers: optional client-request-id,
    # caller-supplied headers, and accept-language.
    headers = {}
    if self.config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        headers.update(custom_headers)
    if self.config.accept_language is not None:
        headers['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    request = self._client.delete(url, query_parameters, headers)
    response = await self._client.async_send(
        request, stream=False, **operation_config)

    if response.status_code != 204:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    # No response body; only wrap the response when raw was requested.
    if raw:
        return ClientRawResponse(None, response)
async def delete204_succeeded(
        self, *, custom_headers=None, raw=False, polling=True, **operation_config):
    """Long running delete request, service returns a 204 to the initial
    request, indicating success.

    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for AsyncARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of None or ClientRawResponse<None> if raw==True
    :rtype: ~None or ~msrest.pipeline.ClientRawResponse[None]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Always fetch the raw initial response for the poller.
    initial_response = await self._delete204_succeeded_initial(
        custom_headers=custom_headers,
        raw=True,
        **operation_config
    )

    def extract_result(response):
        # No body is expected; only wrap when raw output was requested.
        if raw:
            return ClientRawResponse(None, response)

    delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    # Default ARM polling, no polling, or a caller-supplied strategy.
    if polling is True:
        strategy = AsyncARMPolling(delay, **operation_config)
    elif polling is False:
        strategy = AsyncNoPolling()
    else:
        strategy = polling
    return await async_poller(self._client, initial_response, extract_result, strategy)
delete204_succeeded.metadata = {'url': '/lro/error/delete/204/nolocation'}
async def _delete_async_relative_retry_no_status_initial(
        self, *, custom_headers=None, raw=False, **operation_config):
    """Send the initial DELETE for delete_async_relative_retry_no_status;
    expects HTTP 202."""
    url = self.delete_async_relative_retry_no_status.metadata['url']
    query_parameters = {}

    # Assemble request headers: optional client-request-id,
    # caller-supplied headers, and accept-language.
    headers = {}
    if self.config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        headers.update(custom_headers)
    if self.config.accept_language is not None:
        headers['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    request = self._client.delete(url, query_parameters, headers)
    response = await self._client.async_send(
        request, stream=False, **operation_config)

    if response.status_code != 202:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    if raw:
        # Declare the LRO header schema on the raw response.
        raw_response = ClientRawResponse(None, response)
        raw_response.add_headers({
            'Azure-AsyncOperation': 'str',
            'Location': 'str',
            'Retry-After': 'int',
        })
        return raw_response
async def delete_async_relative_retry_no_status(
        self, *, custom_headers=None, raw=False, polling=True, **operation_config):
    """Long running delete request, service returns a 202 to the initial
    request. Poll the endpoint indicated in the Azure-AsyncOperation header
    for operation status.

    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for AsyncARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of None or ClientRawResponse<None> if raw==True
    :rtype: ~None or ~msrest.pipeline.ClientRawResponse[None]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Always fetch the raw initial response for the poller.
    initial_response = await self._delete_async_relative_retry_no_status_initial(
        custom_headers=custom_headers,
        raw=True,
        **operation_config
    )

    def extract_result(response):
        # No body is expected; wrap with the LRO header schema only
        # when raw output was requested.
        if raw:
            raw_response = ClientRawResponse(None, response)
            raw_response.add_headers({
                'Azure-AsyncOperation': 'str',
                'Location': 'str',
                'Retry-After': 'int',
            })
            return raw_response

    delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    # Default ARM polling, no polling, or a caller-supplied strategy.
    if polling is True:
        strategy = AsyncARMPolling(delay, **operation_config)
    elif polling is False:
        strategy = AsyncNoPolling()
    else:
        strategy = polling
    return await async_poller(self._client, initial_response, extract_result, strategy)
delete_async_relative_retry_no_status.metadata = {'url': '/lro/error/deleteasync/retry/nostatus'}
async def _post202_no_location_initial(
        self, product=None, *, custom_headers=None, raw=False, **operation_config):
    """Send the initial POST for post202_no_location; expects HTTP 202."""
    url = self.post202_no_location.metadata['url']
    query_parameters = {}

    # Request headers: JSON content type plus optional
    # client-request-id, caller-supplied headers, and accept-language.
    headers = {'Content-Type': 'application/json; charset=utf-8'}
    if self.config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        headers.update(custom_headers)
    if self.config.accept_language is not None:
        headers['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    # Serialize the optional Product payload.
    body_content = (
        self._serialize.body(product, 'Product') if product is not None else None)

    request = self._client.post(url, query_parameters, headers, body_content)
    response = await self._client.async_send(
        request, stream=False, **operation_config)

    if response.status_code != 202:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    if raw:
        # Declare the LRO header schema on the raw response.
        raw_response = ClientRawResponse(None, response)
        raw_response.add_headers({
            'Location': 'str',
            'Retry-After': 'int',
        })
        return raw_response
async def post202_no_location(
        self, product=None, *, custom_headers=None, raw=False, polling=True, **operation_config):
    """Long running post request, service returns a 202 to the initial
    request, without a location header.

    :param product: Product to put
    :type product: ~lro.models.Product
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for AsyncARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of None or ClientRawResponse<None> if raw==True
    :rtype: ~None or ~msrest.pipeline.ClientRawResponse[None]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Always fetch the raw initial response for the poller.
    initial_response = await self._post202_no_location_initial(
        product=product,
        custom_headers=custom_headers,
        raw=True,
        **operation_config
    )

    def extract_result(response):
        # No body is expected; wrap with the LRO header schema only
        # when raw output was requested.
        if raw:
            raw_response = ClientRawResponse(None, response)
            raw_response.add_headers({
                'Location': 'str',
                'Retry-After': 'int',
            })
            return raw_response

    delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    # Default ARM polling, no polling, or a caller-supplied strategy.
    if polling is True:
        strategy = AsyncARMPolling(delay, **operation_config)
    elif polling is False:
        strategy = AsyncNoPolling()
    else:
        strategy = polling
    return await async_poller(self._client, initial_response, extract_result, strategy)
post202_no_location.metadata = {'url': '/lro/error/post/202/nolocation'}
async def _post_async_relative_retry_no_payload_initial(
        self, product=None, *, custom_headers=None, raw=False, **operation_config):
    """Send the initial POST for post_async_relative_retry_no_payload;
    expects HTTP 202."""
    url = self.post_async_relative_retry_no_payload.metadata['url']
    query_parameters = {}

    # Request headers: JSON content type plus optional
    # client-request-id, caller-supplied headers, and accept-language.
    headers = {'Content-Type': 'application/json; charset=utf-8'}
    if self.config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        headers.update(custom_headers)
    if self.config.accept_language is not None:
        headers['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    # Serialize the optional Product payload.
    body_content = (
        self._serialize.body(product, 'Product') if product is not None else None)

    request = self._client.post(url, query_parameters, headers, body_content)
    response = await self._client.async_send(
        request, stream=False, **operation_config)

    if response.status_code != 202:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    if raw:
        # Declare the LRO header schema on the raw response.
        raw_response = ClientRawResponse(None, response)
        raw_response.add_headers({
            'Azure-AsyncOperation': 'str',
            'Location': 'str',
            'Retry-After': 'int',
        })
        return raw_response
async def post_async_relative_retry_no_payload(
        self, product=None, *, custom_headers=None, raw=False, polling=True, **operation_config):
    """Long running post request, service returns a 202 to the initial
    request, with an entity that contains ProvisioningState='Creating'.
    Poll the endpoint indicated in the Azure-AsyncOperation header for
    operation status.

    :param product: Product to put
    :type product: ~lro.models.Product
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for AsyncARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of None or ClientRawResponse<None> if raw==True
    :rtype: ~None or ~msrest.pipeline.ClientRawResponse[None]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Always fetch the raw initial response for the poller.
    initial_response = await self._post_async_relative_retry_no_payload_initial(
        product=product,
        custom_headers=custom_headers,
        raw=True,
        **operation_config
    )

    def extract_result(response):
        # No body is expected; wrap with the LRO header schema only
        # when raw output was requested.
        if raw:
            raw_response = ClientRawResponse(None, response)
            raw_response.add_headers({
                'Azure-AsyncOperation': 'str',
                'Location': 'str',
                'Retry-After': 'int',
            })
            return raw_response

    delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    # Default ARM polling, no polling, or a caller-supplied strategy.
    if polling is True:
        strategy = AsyncARMPolling(delay, **operation_config)
    elif polling is False:
        strategy = AsyncNoPolling()
    else:
        strategy = polling
    return await async_poller(self._client, initial_response, extract_result, strategy)
post_async_relative_retry_no_payload.metadata = {'url': '/lro/error/postasync/retry/nopayload'}
async def _put200_invalid_json_initial(
        self, product=None, *, custom_headers=None, raw=False, **operation_config):
    """Send the initial PUT for put200_invalid_json; expects HTTP 200 or 204."""
    url = self.put200_invalid_json.metadata['url']
    query_parameters = {}

    # Request headers: JSON accept/content type plus optional
    # client-request-id, caller-supplied headers, and accept-language.
    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if self.config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        headers.update(custom_headers)
    if self.config.accept_language is not None:
        headers['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    # Serialize the optional Product payload.
    body_content = (
        self._serialize.body(product, 'Product') if product is not None else None)

    request = self._client.put(url, query_parameters, headers, body_content)
    response = await self._client.async_send(
        request, stream=False, **operation_config)

    if response.status_code not in (200, 204):
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    # Only a 200 carries a Product body; a 204 yields None.
    result = None
    if response.status_code == 200:
        result = self._deserialize('Product', response)

    if raw:
        return ClientRawResponse(result, response)
    return result
async def put200_invalid_json(
        self, product=None, *, custom_headers=None, raw=False, polling=True, **operation_config):
    """Long running put request, service returns a 200 to the initial request,
    with an entity that is not a valid json.

    :param product: Product to put
    :type product: ~lro.models.Product
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for AsyncARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of Product or ClientRawResponse<Product> if
     raw==True
    :rtype: ~lro.models.Product or
     ~msrest.pipeline.ClientRawResponse[~lro.models.Product]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Always fetch the raw initial response for the poller.
    initial_response = await self._put200_invalid_json_initial(
        product=product,
        custom_headers=custom_headers,
        raw=True,
        **operation_config
    )

    def extract_result(response):
        # Deserialize the final Product; wrap it only when the caller
        # asked for the raw response.
        result = self._deserialize('Product', response)
        if raw:
            return ClientRawResponse(result, response)
        return result

    delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    # Default ARM polling, no polling, or a caller-supplied strategy.
    if polling is True:
        strategy = AsyncARMPolling(delay, **operation_config)
    elif polling is False:
        strategy = AsyncNoPolling()
    else:
        strategy = polling
    return await async_poller(self._client, initial_response, extract_result, strategy)
put200_invalid_json.metadata = {'url': '/lro/error/put/200/invalidjson'}
async def _put_async_relative_retry_invalid_header_initial(
        self, product=None, *, custom_headers=None, raw=False, **operation_config):
    """Send the initial PUT for put_async_relative_retry_invalid_header;
    expects HTTP 200."""
    url = self.put_async_relative_retry_invalid_header.metadata['url']
    query_parameters = {}

    # Request headers: JSON accept/content type plus optional
    # client-request-id, caller-supplied headers, and accept-language.
    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if self.config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        headers.update(custom_headers)
    if self.config.accept_language is not None:
        headers['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    # Serialize the optional Product payload.
    body_content = (
        self._serialize.body(product, 'Product') if product is not None else None)

    request = self._client.put(url, query_parameters, headers, body_content)
    response = await self._client.async_send(
        request, stream=False, **operation_config)

    if response.status_code != 200:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    # A 200 carries a Product body and the LRO header schema.
    result = None
    response_headers = {}
    if response.status_code == 200:
        result = self._deserialize('Product', response)
        response_headers = {
            'Azure-AsyncOperation': 'str',
            'Location': 'str',
            'Retry-After': 'int',
        }

    if raw:
        raw_response = ClientRawResponse(result, response)
        raw_response.add_headers(response_headers)
        return raw_response
    return result
async def put_async_relative_retry_invalid_header(
        self, product=None, *, custom_headers=None, raw=False, polling=True, **operation_config):
    """Long running put request, service returns a 200 to the initial request,
    with an entity that contains ProvisioningState='Creating'. The endpoint
    indicated in the Azure-AsyncOperation header is invalid.

    :param product: Product to put
    :type product: ~lro.models.Product
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for AsyncARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of Product or ClientRawResponse<Product> if
     raw==True
    :rtype: ~lro.models.Product or
     ~msrest.pipeline.ClientRawResponse[~lro.models.Product]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Always fetch the raw initial response for the poller.
    initial_response = await self._put_async_relative_retry_invalid_header_initial(
        product=product,
        custom_headers=custom_headers,
        raw=True,
        **operation_config
    )

    def extract_result(response):
        # Deserialize the final Product; wrap it (with the LRO header
        # schema) only when the caller asked for the raw response.
        result = self._deserialize('Product', response)
        if raw:
            raw_response = ClientRawResponse(result, response)
            raw_response.add_headers({
                'Azure-AsyncOperation': 'str',
                'Location': 'str',
                'Retry-After': 'int',
            })
            return raw_response
        return result

    delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    # Default ARM polling, no polling, or a caller-supplied strategy.
    if polling is True:
        strategy = AsyncARMPolling(delay, **operation_config)
    elif polling is False:
        strategy = AsyncNoPolling()
    else:
        strategy = polling
    return await async_poller(self._client, initial_response, extract_result, strategy)
put_async_relative_retry_invalid_header.metadata = {'url': '/lro/error/putasync/retry/invalidheader'}
async def _put_async_relative_retry_invalid_json_polling_initial(
        self, product=None, *, custom_headers=None, raw=False, **operation_config):
    """Send the initial PUT for put_async_relative_retry_invalid_json_polling;
    expects HTTP 200."""
    url = self.put_async_relative_retry_invalid_json_polling.metadata['url']
    query_parameters = {}

    # Request headers: JSON accept/content type plus optional
    # client-request-id, caller-supplied headers, and accept-language.
    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if self.config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        headers.update(custom_headers)
    if self.config.accept_language is not None:
        headers['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    # Serialize the optional Product payload.
    body_content = (
        self._serialize.body(product, 'Product') if product is not None else None)

    request = self._client.put(url, query_parameters, headers, body_content)
    response = await self._client.async_send(
        request, stream=False, **operation_config)

    if response.status_code != 200:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    # A 200 carries a Product body and the LRO header schema.
    result = None
    response_headers = {}
    if response.status_code == 200:
        result = self._deserialize('Product', response)
        response_headers = {
            'Azure-AsyncOperation': 'str',
            'Location': 'str',
            'Retry-After': 'int',
        }

    if raw:
        raw_response = ClientRawResponse(result, response)
        raw_response.add_headers(response_headers)
        return raw_response
    return result
async def put_async_relative_retry_invalid_json_polling(
        self, product=None, *, custom_headers=None, raw=False, polling=True, **operation_config):
    """Long running put request, service returns a 200 to the initial request,
    with an entity that contains ProvisioningState='Creating'. Poll the
    endpoint indicated in the Azure-AsyncOperation header for operation
    status.

    :param product: Product to put
    :type product: ~lro.models.Product
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for AsyncARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of Product or ClientRawResponse<Product> if
     raw==True
    :rtype: ~lro.models.Product or
     ~msrest.pipeline.ClientRawResponse[~lro.models.Product]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Always fetch the raw initial response for the poller.
    initial_response = await self._put_async_relative_retry_invalid_json_polling_initial(
        product=product,
        custom_headers=custom_headers,
        raw=True,
        **operation_config
    )

    def extract_result(response):
        # Deserialize the final Product; wrap it (with the LRO header
        # schema) only when the caller asked for the raw response.
        result = self._deserialize('Product', response)
        if raw:
            raw_response = ClientRawResponse(result, response)
            raw_response.add_headers({
                'Azure-AsyncOperation': 'str',
                'Location': 'str',
                'Retry-After': 'int',
            })
            return raw_response
        return result

    delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    # Default ARM polling, no polling, or a caller-supplied strategy.
    if polling is True:
        strategy = AsyncARMPolling(delay, **operation_config)
    elif polling is False:
        strategy = AsyncNoPolling()
    else:
        strategy = polling
    return await async_poller(self._client, initial_response, extract_result, strategy)
put_async_relative_retry_invalid_json_polling.metadata = {'url': '/lro/error/putasync/retry/invalidjsonpolling'}
async def _delete202_retry_invalid_header_initial(
        self, *, custom_headers=None, raw=False, **operation_config):
    """Send the initial DELETE for delete202_retry_invalid_header;
    expects HTTP 202."""
    url = self.delete202_retry_invalid_header.metadata['url']
    query_parameters = {}

    # Assemble request headers: optional client-request-id,
    # caller-supplied headers, and accept-language.
    headers = {}
    if self.config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        headers.update(custom_headers)
    if self.config.accept_language is not None:
        headers['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    request = self._client.delete(url, query_parameters, headers)
    response = await self._client.async_send(
        request, stream=False, **operation_config)

    if response.status_code != 202:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    if raw:
        # Declare the LRO header schema on the raw response.
        raw_response = ClientRawResponse(None, response)
        raw_response.add_headers({
            'Location': 'str',
            'Retry-After': 'int',
        })
        return raw_response
async def delete202_retry_invalid_header(
        self, *, custom_headers=None, raw=False, polling=True, **operation_config):
    """Long running delete request, service returns a 202 to the initial
    request, receiving a response with invalid 'Location' and 'Retry-After'
    headers.

    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for AsyncARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of None or ClientRawResponse<None> if raw==True
    :rtype: ~None or ~msrest.pipeline.ClientRawResponse[None]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Always fetch the raw initial response for the poller.
    initial_response = await self._delete202_retry_invalid_header_initial(
        custom_headers=custom_headers,
        raw=True,
        **operation_config
    )

    def extract_result(response):
        # No body is expected; wrap with the LRO header schema only
        # when raw output was requested.
        if raw:
            raw_response = ClientRawResponse(None, response)
            raw_response.add_headers({
                'Location': 'str',
                'Retry-After': 'int',
            })
            return raw_response

    delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    # Default ARM polling, no polling, or a caller-supplied strategy.
    if polling is True:
        strategy = AsyncARMPolling(delay, **operation_config)
    elif polling is False:
        strategy = AsyncNoPolling()
    else:
        strategy = polling
    return await async_poller(self._client, initial_response, extract_result, strategy)
delete202_retry_invalid_header.metadata = {'url': '/lro/error/delete/202/retry/invalidheader'}
async def _delete_async_relative_retry_invalid_header_initial(
        self, *, custom_headers=None, raw=False, **operation_config):
    """Send the initial DELETE for delete_async_relative_retry_invalid_header;
    expects HTTP 202."""
    url = self.delete_async_relative_retry_invalid_header.metadata['url']
    query_parameters = {}

    # Assemble request headers: optional client-request-id,
    # caller-supplied headers, and accept-language.
    headers = {}
    if self.config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        headers.update(custom_headers)
    if self.config.accept_language is not None:
        headers['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    request = self._client.delete(url, query_parameters, headers)
    response = await self._client.async_send(
        request, stream=False, **operation_config)

    if response.status_code != 202:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    if raw:
        # Declare the LRO header schema on the raw response.
        raw_response = ClientRawResponse(None, response)
        raw_response.add_headers({
            'Azure-AsyncOperation': 'str',
            'Location': 'str',
            'Retry-After': 'int',
        })
        return raw_response
async def delete_async_relative_retry_invalid_header(
        self, *, custom_headers=None, raw=False, polling=True, **operation_config):
    """Long running delete request, service returns a 202 to the initial
    request. The endpoint indicated in the Azure-AsyncOperation header is
    invalid.

    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for AsyncARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of None or ClientRawResponse<None> if raw==True
    :rtype: ~None or ~msrest.pipeline.ClientRawResponse[None]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Always fetch the raw initial response for the poller.
    initial_response = await self._delete_async_relative_retry_invalid_header_initial(
        custom_headers=custom_headers,
        raw=True,
        **operation_config
    )

    def extract_result(response):
        # No body is expected; wrap with the LRO header schema only
        # when raw output was requested.
        if raw:
            raw_response = ClientRawResponse(None, response)
            raw_response.add_headers({
                'Azure-AsyncOperation': 'str',
                'Location': 'str',
                'Retry-After': 'int',
            })
            return raw_response

    delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    # Default ARM polling, no polling, or a caller-supplied strategy.
    if polling is True:
        strategy = AsyncARMPolling(delay, **operation_config)
    elif polling is False:
        strategy = AsyncNoPolling()
    else:
        strategy = polling
    return await async_poller(self._client, initial_response, extract_result, strategy)
delete_async_relative_retry_invalid_header.metadata = {'url': '/lro/error/deleteasync/retry/invalidheader'}
async def _delete_async_relative_retry_invalid_json_polling_initial(
        self, *, custom_headers=None, raw=False, **operation_config):
    """Send the initial DELETE for
    delete_async_relative_retry_invalid_json_polling; expects HTTP 202."""
    url = self.delete_async_relative_retry_invalid_json_polling.metadata['url']
    query_parameters = {}

    # Assemble request headers: optional client-request-id,
    # caller-supplied headers, and accept-language.
    headers = {}
    if self.config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        headers.update(custom_headers)
    if self.config.accept_language is not None:
        headers['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    request = self._client.delete(url, query_parameters, headers)
    response = await self._client.async_send(
        request, stream=False, **operation_config)

    if response.status_code != 202:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    if raw:
        # Declare the LRO header schema on the raw response.
        raw_response = ClientRawResponse(None, response)
        raw_response.add_headers({
            'Azure-AsyncOperation': 'str',
            'Location': 'str',
            'Retry-After': 'int',
        })
        return raw_response
async def delete_async_relative_retry_invalid_json_polling(
        self, *, custom_headers=None, raw=False, polling=True, **operation_config):
    """Long running delete request, service returns a 202 to the initial
    request. Poll the endpoint indicated in the Azure-AsyncOperation header
    for operation status.

    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for AsyncARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of None or ClientRawResponse<None> if raw==True
    :rtype: ~None or ~msrest.pipeline.ClientRawResponse[None]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Always fetch the raw initial response for the poller.
    initial_response = await self._delete_async_relative_retry_invalid_json_polling_initial(
        custom_headers=custom_headers,
        raw=True,
        **operation_config
    )

    def extract_result(response):
        # No body is expected; wrap with the LRO header schema only
        # when raw output was requested.
        if raw:
            raw_response = ClientRawResponse(None, response)
            raw_response.add_headers({
                'Azure-AsyncOperation': 'str',
                'Location': 'str',
                'Retry-After': 'int',
            })
            return raw_response

    delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    # Default ARM polling, no polling, or a caller-supplied strategy.
    if polling is True:
        strategy = AsyncARMPolling(delay, **operation_config)
    elif polling is False:
        strategy = AsyncNoPolling()
    else:
        strategy = polling
    return await async_poller(self._client, initial_response, extract_result, strategy)
delete_async_relative_retry_invalid_json_polling.metadata = {'url': '/lro/error/deleteasync/retry/invalidjsonpolling'}
async def _post202_retry_invalid_header_initial(
        self, product=None, *, custom_headers=None, raw=False, **operation_config):
    """Send the initial POST for post202_retry_invalid_header;
    expects HTTP 202."""
    url = self.post202_retry_invalid_header.metadata['url']
    query_parameters = {}

    # Request headers: JSON content type plus optional
    # client-request-id, caller-supplied headers, and accept-language.
    headers = {'Content-Type': 'application/json; charset=utf-8'}
    if self.config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        headers.update(custom_headers)
    if self.config.accept_language is not None:
        headers['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    # Serialize the optional Product payload.
    body_content = (
        self._serialize.body(product, 'Product') if product is not None else None)

    request = self._client.post(url, query_parameters, headers, body_content)
    response = await self._client.async_send(
        request, stream=False, **operation_config)

    if response.status_code != 202:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    if raw:
        # Declare the LRO header schema on the raw response.
        raw_response = ClientRawResponse(None, response)
        raw_response.add_headers({
            'Location': 'str',
            'Retry-After': 'int',
        })
        return raw_response
async def post202_retry_invalid_header(
        self, product=None, *, custom_headers=None, raw=False, polling=True, **operation_config):
    """Long running post request, service returns a 202 to the initial
    request, with invalid 'Location' and 'Retry-After' headers.

    :param product: Product to put
    :type product: ~lro.models.Product
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for AsyncARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of None or ClientRawResponse<None> if raw==True
    :rtype: ~None or ~msrest.pipeline.ClientRawResponse[None]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Always fetch the raw initial response for the poller.
    initial_response = await self._post202_retry_invalid_header_initial(
        product=product,
        custom_headers=custom_headers,
        raw=True,
        **operation_config
    )

    def extract_result(response):
        # No body is expected; wrap with the LRO header schema only
        # when raw output was requested.
        if raw:
            raw_response = ClientRawResponse(None, response)
            raw_response.add_headers({
                'Location': 'str',
                'Retry-After': 'int',
            })
            return raw_response

    delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    # Default ARM polling, no polling, or a caller-supplied strategy.
    if polling is True:
        strategy = AsyncARMPolling(delay, **operation_config)
    elif polling is False:
        strategy = AsyncNoPolling()
    else:
        strategy = polling
    return await async_poller(self._client, initial_response, extract_result, strategy)
post202_retry_invalid_header.metadata = {'url': '/lro/error/post/202/retry/invalidheader'}
async def _post_async_relative_retry_invalid_header_initial(
        self, product=None, *, custom_headers=None, raw=False, **operation_config):
    """Issue the initial POST for the async-retry-invalid-header LRO.

    Returns None, or a ClientRawResponse when ``raw`` is True; raises
    CloudError for any status other than 202.
    """
    # This operation has no path or query parameters.
    url = self.post_async_relative_retry_invalid_header.metadata['url']
    query_parameters = {}

    # Assemble request headers.
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    # Serialize the optional body.
    body_content = self._serialize.body(product, 'Product') if product is not None else None

    # Send the request and await the response.
    request = self._client.post(url, query_parameters, header_parameters, body_content)
    response = await self._client.async_send(request, stream=False, **operation_config)

    if response.status_code not in [202]:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    if raw:
        raw_response = ClientRawResponse(None, response)
        raw_response.add_headers({
            'Azure-AsyncOperation': 'str',
            'Location': 'str',
            'Retry-After': 'int',
        })
        return raw_response
async def post_async_relative_retry_invalid_header(
        self, product=None, *, custom_headers=None, raw=False, polling=True, **operation_config):
    """Long running post request; the service answers the initial request
    with a 202 whose entity contains ProvisioningState='Creating', and the
    endpoint indicated in the Azure-AsyncOperation header is invalid.

    :param product: Product to put
    :type product: ~lro.models.Product
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for AsyncARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of None or ClientRawResponse<None> if raw==True
    :rtype: ~None or ~msrest.pipeline.ClientRawResponse[None]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Fire the initial request; raw=True so the poller can read headers.
    initial_response = await self._post_async_relative_retry_invalid_header_initial(
        product=product,
        custom_headers=custom_headers,
        raw=True,
        **operation_config
    )

    def get_long_running_output(response):
        # Only wrap the final response when the caller requested raw mode.
        if not raw:
            return None
        wrapped = ClientRawResponse(None, response)
        wrapped.add_headers({
            'Azure-AsyncOperation': 'str',
            'Location': 'str',
            'Retry-After': 'int',
        })
        return wrapped

    delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    if polling is True:
        polling_method = AsyncARMPolling(delay, **operation_config)
    elif polling is False:
        polling_method = AsyncNoPolling()
    else:
        polling_method = polling
    return await async_poller(self._client, initial_response, get_long_running_output, polling_method)
post_async_relative_retry_invalid_header.metadata = {'url': '/lro/error/postasync/retry/invalidheader'}
async def _post_async_relative_retry_invalid_json_polling_initial(
        self, product=None, *, custom_headers=None, raw=False, **operation_config):
    """Issue the initial POST for the async-retry-invalid-json-polling LRO.

    Returns None, or a ClientRawResponse when ``raw`` is True; raises
    CloudError for any status other than 202.
    """
    # This operation has no path or query parameters.
    url = self.post_async_relative_retry_invalid_json_polling.metadata['url']
    query_parameters = {}

    # Assemble request headers.
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    # Serialize the optional body.
    body_content = self._serialize.body(product, 'Product') if product is not None else None

    # Send the request and await the response.
    request = self._client.post(url, query_parameters, header_parameters, body_content)
    response = await self._client.async_send(request, stream=False, **operation_config)

    if response.status_code not in [202]:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    if raw:
        raw_response = ClientRawResponse(None, response)
        raw_response.add_headers({
            'Azure-AsyncOperation': 'str',
            'Location': 'str',
            'Retry-After': 'int',
        })
        return raw_response
async def post_async_relative_retry_invalid_json_polling(
        self, product=None, *, custom_headers=None, raw=False, polling=True, **operation_config):
    """Long running post request; the service answers the initial request
    with a 202 whose entity contains ProvisioningState='Creating'. The
    endpoint indicated in the Azure-AsyncOperation header is polled for
    operation status.

    :param product: Product to put
    :type product: ~lro.models.Product
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for AsyncARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of None or ClientRawResponse<None> if raw==True
    :rtype: ~None or ~msrest.pipeline.ClientRawResponse[None]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Fire the initial request; raw=True so the poller can read headers.
    initial_response = await self._post_async_relative_retry_invalid_json_polling_initial(
        product=product,
        custom_headers=custom_headers,
        raw=True,
        **operation_config
    )

    def get_long_running_output(response):
        # Only wrap the final response when the caller requested raw mode.
        if not raw:
            return None
        wrapped = ClientRawResponse(None, response)
        wrapped.add_headers({
            'Azure-AsyncOperation': 'str',
            'Location': 'str',
            'Retry-After': 'int',
        })
        return wrapped

    delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    if polling is True:
        polling_method = AsyncARMPolling(delay, **operation_config)
    elif polling is False:
        polling_method = AsyncNoPolling()
    else:
        polling_method = polling
    return await async_poller(self._client, initial_response, get_long_running_output, polling_method)
post_async_relative_retry_invalid_json_polling.metadata = {'url': '/lro/error/postasync/retry/invalidjsonpolling'}
| 43.625055
| 140
| 0.652379
| 10,946
| 99,945
| 5.723552
| 0.019368
| 0.037765
| 0.039074
| 0.02988
| 0.977781
| 0.972977
| 0.967518
| 0.959633
| 0.952737
| 0.946816
| 0
| 0.008415
| 0.265196
| 99,945
| 2,290
| 141
| 43.644105
| 0.844662
| 0.031657
| 0
| 0.862841
| 0
| 0
| 0.091907
| 0.040252
| 0
| 0
| 0
| 0
| 0
| 1
| 0.018894
| false
| 0
| 0.004199
| 0
| 0.093072
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7f7683ea2c070bd16f549fa0aa70c9b1ea551b00
| 161
|
py
|
Python
|
dataset/utils.py
|
arunreddy/pytorch-data
|
b90a40ea6e741e561e8357c371ab40764016c194
|
[
"MIT"
] | null | null | null |
dataset/utils.py
|
arunreddy/pytorch-data
|
b90a40ea6e741e561e8357c371ab40764016c194
|
[
"MIT"
] | null | null | null |
dataset/utils.py
|
arunreddy/pytorch-data
|
b90a40ea6e741e561e8357c371ab40764016c194
|
[
"MIT"
] | null | null | null |
import os
from pathlib import Path
def get_data_dir():
    """Return the root data directory.

    Configurable through the ``DATA_DIR`` environment variable; defaults
    to ``/data`` when the variable is unset.
    """
    return Path(os.environ.get("DATA_DIR", "/data"))
def get_raw_data_dir():
    """Return the ``raw`` subdirectory beneath the root data directory."""
    return get_data_dir().joinpath("raw")
| 14.636364
| 45
| 0.708075
| 27
| 161
| 3.925926
| 0.444444
| 0.264151
| 0.188679
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15528
| 161
| 10
| 46
| 16.1
| 0.779412
| 0
| 0
| 0
| 0
| 0
| 0.099379
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
63f57d1ad0ff55393dde1533101473472f33cb2a
| 19,843
|
py
|
Python
|
tests/test_views.py
|
jessamynsmith/dj-stripe
|
139163b82a8f2b7cd52aff8fefc484b90302af4c
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_views.py
|
jessamynsmith/dj-stripe
|
139163b82a8f2b7cd52aff8fefc484b90302af4c
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_views.py
|
jessamynsmith/dj-stripe
|
139163b82a8f2b7cd52aff8fefc484b90302af4c
|
[
"BSD-3-Clause"
] | 1
|
2021-08-30T10:51:49.000Z
|
2021-08-30T10:51:49.000Z
|
"""
.. module:: dj-stripe.tests.test_views
:synopsis: dj-stripe View Tests.
.. moduleauthor:: Daniel Greenfeld (@pydanny)
.. moduleauthor:: Alex Kavanaugh (@kavdev)
"""
from decimal import Decimal
from django.contrib.auth import get_user_model
from django.core.urlresolvers import reverse
from django.test.client import RequestFactory
from django.test.testcases import TestCase
import stripe
from mock import patch, PropertyMock
from djstripe import settings as djstripe_settings
from djstripe.models import Customer, CurrentSubscription
from djstripe.views import ChangeCardView, HistoryView
class AccountViewTest(TestCase):
    """Tests for the djstripe account view: customer auto-creation and
    template context (plans, subscription)."""

    # Fake Stripe customer id returned by the patched stripe.Customer.create.
    fake_stripe_customer_id = "cus_xxx1234567890"

    def setUp(self):
        # Create and log in a test user so the account view is reachable.
        self.url = reverse("djstripe:account")
        self.user = get_user_model().objects.create_user(username="testuser",
                                                         email="test@example.com",
                                                         password="123")
        self.assertTrue(self.client.login(username="testuser", password="123"))

    @patch("stripe.Customer.create", return_value=PropertyMock(id=fake_stripe_customer_id))
    def test_autocreate_customer(self, stripe_create_customer_mock):
        self.assertEqual(Customer.objects.count(), 0)
        response = self.client.get(self.url)
        # simply visiting the page should generate a new customer record.
        stripe_create_customer_mock.assert_called_once_with(email=self.user.email)
        self.assertEqual(self.fake_stripe_customer_id, response.context["customer"].stripe_id)
        self.assertEqual(self.user, response.context["customer"].subscriber)
        self.assertEqual(Customer.objects.count(), 1)

    @patch("stripe.Customer.create", return_value=PropertyMock(id=fake_stripe_customer_id))
    def test_plan_list_context(self, stripe_create_customer_mock):
        # The configured plan list must be exposed to the template.
        response = self.client.get(self.url)
        self.assertEqual(djstripe_settings.PLAN_LIST, response.context["plans"])

    @patch("stripe.Customer.create", return_value=PropertyMock(id=fake_stripe_customer_id))
    def test_subscription_context(self, stripe_create_customer_mock):
        # Without a subscription, the context value is None.
        response = self.client.get(self.url)
        self.assertEqual(None, response.context["subscription"])

    @patch("djstripe.models.Customer.current_subscription", new_callable=PropertyMock, return_value=CurrentSubscription(plan="test_plan_07"))
    @patch("stripe.Customer.create", return_value=PropertyMock(id=fake_stripe_customer_id))
    def test_subscription_context_with_plan(self, djstripe_customer_customer_subscription_mock, stripe_create_customer_mock):
        response = self.client.get(self.url)
        self.assertEqual("test_plan_07", response.context["subscription"].plan)
class ChangeCardViewTest(TestCase):
    """Tests for the change-card view: GET rendering, new/changed card
    handling, Stripe error surfacing, and get_object stability."""

    def setUp(self):
        # Create and log in a test user so the view is reachable.
        self.url = reverse("djstripe:change_card")
        self.user = get_user_model().objects.create_user(username="testuser",
                                                         email="test@example.com",
                                                         password="123")
        self.assertTrue(self.client.login(username="testuser", password="123"))

    @patch("stripe.Customer.create", return_value=PropertyMock(id="cus_xxx1234567890"))
    def test_get(self, stripe_create_customer_mock):
        response = self.client.get(self.url)
        self.assertEqual(200, response.status_code)

    @patch("djstripe.models.Customer.retry_unpaid_invoices", autospec=True)
    @patch("djstripe.models.Customer.send_invoice", autospec=True)
    @patch("djstripe.models.Customer.update_card", autospec=True)
    @patch("stripe.Customer.create", return_value=PropertyMock(id="cus_xxx1234567890"))
    def test_post_new_card(self, stripe_customer_mock, update_card_mock, send_invoice_mock, retry_unpaid_invoices_mock):
        self.client.post(self.url, {"stripe_token": "alpha"})
        update_card_mock.assert_called_once_with(self.user.customer, "alpha")
        # A brand-new card triggers an invoice and a retry of unpaid invoices.
        send_invoice_mock.assert_called_with(self.user.customer)
        retry_unpaid_invoices_mock.assert_called_once_with(self.user.customer)

    @patch("djstripe.models.Customer.retry_unpaid_invoices", autospec=True)
    @patch("djstripe.models.Customer.send_invoice", autospec=True)
    @patch("djstripe.models.Customer.update_card", autospec=True)
    @patch("stripe.Customer.create", return_value=PropertyMock(id="cus_xxx1234567890"))
    def test_post_change_card(self, stripe_customer_mock, update_card_mock, send_invoice_mock, retry_unpaid_invoices_mock):
        # Pre-existing customer with a card fingerprint simulates a card change.
        Customer.objects.get_or_create(subscriber=self.user, card_fingerprint="4449")
        self.assertEqual(1, Customer.objects.count())
        self.client.post(self.url, {"stripe_token": "beta"})
        self.assertEqual(1, Customer.objects.count())
        update_card_mock.assert_called_once_with(self.user.customer, "beta")
        # Changing an existing card must not send a new invoice.
        self.assertFalse(send_invoice_mock.called)
        retry_unpaid_invoices_mock.assert_called_once_with(self.user.customer)

    @patch("djstripe.models.Customer.update_card", autospec=True)
    @patch("stripe.Customer.create", return_value=PropertyMock(id="cus_xxx1234567890"))
    def test_post_card_error(self, stripe_create_customer_mock, update_card_mock):
        # Stripe errors must surface in the template context, not crash.
        update_card_mock.side_effect = stripe.StripeError("An error occurred while processing your card.")
        response = self.client.post(self.url, {"stripe_token": "pie"})
        update_card_mock.assert_called_once_with(self.user.customer, "pie")
        self.assertIn("stripe_error", response.context)
        self.assertIn("An error occurred while processing your card.", response.context["stripe_error"])

    @patch("djstripe.models.Customer.update_card", autospec=True)
    @patch("stripe.Customer.create", return_value=PropertyMock(id="cus_xxx1234567890"))
    def test_post_no_card(self, stripe_create_customer_mock, update_card_mock):
        update_card_mock.side_effect = stripe.StripeError("Invalid source object:")
        response = self.client.post(self.url)
        update_card_mock.assert_called_once_with(self.user.customer, None)
        self.assertIn("stripe_error", response.context)
        self.assertIn("Invalid source object:", response.context["stripe_error"])

    @patch("stripe.Customer.create", return_value=PropertyMock(id="cus_xxx1234567890"))
    def test_get_object(self, stripe_create_customer_mock):
        # get_object should be stable across calls and match the DB customer.
        view_instance = ChangeCardView()
        request = RequestFactory()
        request.user = self.user
        view_instance.request = request
        object_a = view_instance.get_object()
        object_b = view_instance.get_object()
        customer_instance = Customer.objects.get(subscriber=self.user)
        self.assertEqual(customer_instance, object_a)
        self.assertEqual(object_a, object_b)

    def test_get_success_url(self):
        view_instance = ChangeCardView()
        url = view_instance.get_post_success_url()
        self.assertEqual(reverse("djstripe:account"), url)
class HistoryViewTest(TestCase):
    """Tests for the history view's get_object behavior."""

    def setUp(self):
        # Create and log in a test user so the view is reachable.
        self.url = reverse("djstripe:history")
        self.user = get_user_model().objects.create_user(username="testuser",
                                                         email="test@example.com",
                                                         password="123")
        self.assertTrue(self.client.login(username="testuser", password="123"))

    @patch("stripe.Customer.create", return_value=PropertyMock(id="cus_xxx1234567890"))
    def test_get_object(self, stripe_create_customer_mock):
        # get_object should auto-create the customer and return the DB record.
        view_instance = HistoryView()
        request = RequestFactory()
        request.user = self.user
        view_instance.request = request
        object_a = view_instance.get_object()
        stripe_create_customer_mock.assert_called_once_with(email=self.user.email)
        customer_instance = Customer.objects.get(subscriber=self.user)
        self.assertEqual(customer_instance, object_a)
class SyncHistoryViewTest(TestCase):
    """Tests for the sync-history view's POST handling."""

    def setUp(self):
        # Create and log in a test user so the view is reachable.
        self.url = reverse("djstripe:sync_history")
        self.user = get_user_model().objects.create_user(username="testuser",
                                                         email="test@example.com",
                                                         password="123")
        self.assertTrue(self.client.login(username="testuser", password="123"))

    @patch("djstripe.views.sync_subscriber", new_callable=PropertyMock, return_value=PropertyMock(subscriber="pie"))
    def test_post(self, sync_subscriber_mock):
        # POST should trigger a sync for the logged-in user and expose the
        # synced customer in the context.
        response = self.client.post(self.url)
        sync_subscriber_mock.assert_called_once_with(self.user)
        self.assertEqual("pie", response.context["customer"].subscriber)
class ConfirmFormViewTest(TestCase):
    """Tests for the plan-confirmation view: GET validation against the
    current subscription, and POST subscribe/error paths."""

    # Fake Stripe customer id returned by the patched stripe.Customer.create.
    fake_stripe_customer_id = "cus_xxx1234567890"

    def setUp(self):
        # Create and log in a test user, targeting the "test0" plan.
        self.plan = "test0"
        self.url = reverse("djstripe:confirm", kwargs={'plan': self.plan})
        self.user = get_user_model().objects.create_user(username="testuser",
                                                         email="test@example.com",
                                                         password="123")
        self.assertTrue(self.client.login(username="testuser", password="123"))

    @patch("djstripe.models.Customer.current_subscription", new_callable=PropertyMock, return_value=CurrentSubscription(plan="something-else"))
    @patch("stripe.Customer.create", return_value=PropertyMock(id=fake_stripe_customer_id))
    def test_get_form_valid(self, djstripe_customer_customer_subscription_mock, stripe_create_customer_mock):
        # Confirming a plan different from the current one renders the form.
        response = self.client.get(self.url)
        self.assertEqual(200, response.status_code)

    @patch("djstripe.models.Customer.current_subscription", new_callable=PropertyMock, return_value=CurrentSubscription(plan="test0"))
    @patch("stripe.Customer.create", return_value=PropertyMock(id=fake_stripe_customer_id))
    def test_get_form_unknown(self, djstripe_customer_customer_subscription_mock, stripe_create_customer_mock):
        # Unknown plan names redirect back to the subscribe page.
        response = self.client.get(reverse("djstripe:confirm", kwargs={'plan': 'does-not-exist'}))
        self.assertRedirects(response, reverse("djstripe:subscribe"))

    @patch("djstripe.models.Customer.current_subscription", new_callable=PropertyMock, return_value=CurrentSubscription(plan="test0"))
    @patch("stripe.Customer.create", return_value=PropertyMock(id=fake_stripe_customer_id))
    def test_get_form_invalid(self, djstripe_customer_customer_subscription_mock, stripe_create_customer_mock):
        # Confirming the plan the user is already on redirects to subscribe.
        response = self.client.get(self.url)
        self.assertRedirects(response, reverse("djstripe:subscribe"))

    @patch("djstripe.models.Customer.subscribe", autospec=True)
    @patch("djstripe.models.Customer.update_card", autospec=True)
    @patch("stripe.Customer.create", return_value=PropertyMock(id="cus_xxx1234567890"))
    def test_post_valid(self, stripe_customer_mock, update_card_mock, subscribe_mock):
        # A valid POST creates the customer, updates the card, subscribes,
        # and redirects to the history page.
        self.assertEqual(0, Customer.objects.count())
        response = self.client.post(self.url, {"plan": self.plan, "stripe_token": "cake"})
        self.assertEqual(1, Customer.objects.count())
        update_card_mock.assert_called_once_with(self.user.customer, "cake")
        subscribe_mock.assert_called_once_with(self.user.customer, self.plan)
        self.assertRedirects(response, reverse("djstripe:history"))

    @patch("djstripe.models.Customer.subscribe", autospec=True)
    @patch("djstripe.models.Customer.update_card", autospec=True)
    @patch("stripe.Customer.create", return_value=PropertyMock(id="cus_xxx1234567890"))
    def test_post_no_card(self, stripe_customer_mock, update_card_mock, subscribe_mock):
        # Stripe errors during card update surface as form errors.
        update_card_mock.side_effect = stripe.StripeError("Invalid source object:")
        response = self.client.post(self.url, {"plan": self.plan})
        self.assertEqual(200, response.status_code)
        self.assertIn("form", response.context)
        self.assertIn("Invalid source object:", response.context["form"].errors["__all__"])

    @patch("stripe.Customer.create", return_value=PropertyMock(id="cus_xxx1234567890"))
    def test_post_form_invalid(self, stripe_customer_mock):
        # A POST without a plan fails form validation.
        response = self.client.post(self.url)
        self.assertEqual(200, response.status_code)
        self.assertIn("plan", response.context["form"].errors)
        self.assertIn("This field is required.", response.context["form"].errors["plan"])
class ChangePlanViewTest(TestCase):
    """Tests for the change-plan view: form validation, proration policy
    for upgrades/downgrades/same plan, and Stripe error surfacing."""

    @patch("stripe.Customer.create", return_value=PropertyMock(id="cus_xxx1234567890_01"))
    def setUp(self, stripe_customer_mock):
        # user1 gets a Customer record; user2 deliberately does not, so the
        # "not yet subscribed" path can be exercised.
        self.url = reverse("djstripe:change_plan")
        self.user1 = get_user_model().objects.create_user(username="testuser1",
                                                          email="test@example.com",
                                                          password="123")
        self.user2 = get_user_model().objects.create_user(username="testuser2",
                                                          email="test@example.com",
                                                          password="123")
        Customer.get_or_create(subscriber=self.user1)

    @patch("stripe.Customer.create", return_value=PropertyMock(id="cus_xxx1234567890"))
    def test_post_form_invalid(self, stripe_customer_mock):
        # A POST without a plan fails form validation.
        self.assertTrue(self.client.login(username="testuser1", password="123"))
        response = self.client.post(self.url)
        self.assertEqual(200, response.status_code)
        self.assertIn("plan", response.context["form"].errors)
        self.assertIn("This field is required.", response.context["form"].errors["plan"])

    @patch("stripe.Customer.create", return_value=PropertyMock(id="cus_xxx1234567890_02"))
    def test_post_new_sub_no_proration(self, stripe_customer_mock):
        # user2 has no subscription yet, so changing plans is rejected.
        self.assertTrue(self.client.login(username="testuser2", password="123"))
        response = self.client.post(self.url, {"plan": "test0"})
        self.assertEqual(200, response.status_code)
        self.assertIn("form", response.context)
        self.assertIn("You must already be subscribed to a plan before you can change it.", response.context["form"].errors["__all__"])

    @patch("djstripe.models.Customer.current_subscription", new_callable=PropertyMock, return_value=CurrentSubscription(plan="test", amount=Decimal(25.00)))
    @patch("djstripe.models.Customer.subscribe", autospec=True)
    def test_change_sub_no_proration(self, subscribe_mock, current_subscription_mock):
        # Proration disabled: subscribe is called without prorate.
        self.assertTrue(self.client.login(username="testuser1", password="123"))
        response = self.client.post(self.url, {"plan": "test0"})
        self.assertRedirects(response, reverse("djstripe:history"))
        subscribe_mock.assert_called_once_with(self.user1.customer, "test0")

    @patch("djstripe.views.PRORATION_POLICY_FOR_UPGRADES", return_value=True)
    @patch("djstripe.models.Customer.current_subscription", new_callable=PropertyMock, return_value=CurrentSubscription(plan="test", amount=Decimal(25.00)))
    @patch("djstripe.models.Customer.subscribe", autospec=True)
    def test_change_sub_with_proration_downgrade(self, subscribe_mock, current_subscription_mock, proration_policy_mock):
        # Downgrades are never prorated, even with the upgrade policy on.
        self.assertTrue(self.client.login(username="testuser1", password="123"))
        response = self.client.post(self.url, {"plan": "test0"})
        self.assertRedirects(response, reverse("djstripe:history"))
        subscribe_mock.assert_called_once_with(self.user1.customer, "test0")

    @patch("djstripe.views.PRORATION_POLICY_FOR_UPGRADES", return_value=True)
    @patch("djstripe.models.Customer.current_subscription", new_callable=PropertyMock, return_value=CurrentSubscription(plan="test", amount=Decimal(25.00)))
    @patch("djstripe.models.Customer.subscribe", autospec=True)
    def test_change_sub_with_proration_upgrade(self, subscribe_mock, current_subscription_mock, proration_policy_mock):
        # Upgrades are prorated when the upgrade proration policy is on.
        self.assertTrue(self.client.login(username="testuser1", password="123"))
        response = self.client.post(self.url, {"plan": "test2"})
        self.assertRedirects(response, reverse("djstripe:history"))
        subscribe_mock.assert_called_once_with(self.user1.customer, "test2", prorate=True)

    @patch("djstripe.views.PRORATION_POLICY_FOR_UPGRADES", return_value=True)
    @patch("djstripe.models.Customer.current_subscription", new_callable=PropertyMock, return_value=CurrentSubscription(plan="test", amount=Decimal(25.00)))
    @patch("djstripe.models.Customer.subscribe", autospec=True)
    def test_change_sub_with_proration_same_plan(self, subscribe_mock, current_subscription_mock, proration_policy_mock):
        # Re-selecting the same plan is not prorated.
        self.assertTrue(self.client.login(username="testuser1", password="123"))
        response = self.client.post(self.url, {"plan": "test"})
        self.assertRedirects(response, reverse("djstripe:history"))
        subscribe_mock.assert_called_once_with(self.user1.customer, "test")

    @patch("djstripe.models.Customer.current_subscription", new_callable=PropertyMock, return_value=CurrentSubscription(plan="test", amount=Decimal(25.00)))
    @patch("djstripe.models.Customer.subscribe", autospec=True)
    def test_change_sub_same_plan(self, subscribe_mock, current_subscription_mock):
        self.assertTrue(self.client.login(username="testuser1", password="123"))
        response = self.client.post(self.url, {"plan": "test"})
        self.assertRedirects(response, reverse("djstripe:history"))
        subscribe_mock.assert_called_once_with(self.user1.customer, "test")

    @patch("djstripe.models.Customer.subscribe", autospec=True)
    def test_change_sub_stripe_error(self, subscribe_mock):
        # Stripe errors during subscribe surface as form errors.
        subscribe_mock.side_effect = stripe.StripeError("No such plan: test_id_3")
        self.assertTrue(self.client.login(username="testuser1", password="123"))
        response = self.client.post(self.url, {"plan": "test_deletion"})
        self.assertEqual(200, response.status_code)
        self.assertIn("form", response.context)
        self.assertIn("No such plan: test_id_3", response.context["form"].errors["__all__"])
class CancelSubscriptionViewTest(TestCase):
    """Tests for the cancel-subscription view: at-period-end cancellation
    keeps the session, immediate cancellation logs the user out."""

    def setUp(self):
        # Create and log in a test user so the view is reachable.
        self.url = reverse("djstripe:cancel_subscription")
        self.user = get_user_model().objects.create_user(username="testuser",
                                                         email="test@example.com",
                                                         password="123")
        self.assertTrue(self.client.login(username="testuser", password="123"))

    @patch("stripe.Customer.create", return_value=PropertyMock(id="cus_xxx1234567890"))
    @patch("djstripe.models.Customer.cancel_subscription", return_value=CurrentSubscription(status=CurrentSubscription.STATUS_ACTIVE))
    def test_cancel_proration(self, cancel_subscription_mock, stripe_create_customer_mock):
        # Subscription still active after cancel => user stays logged in.
        response = self.client.post(self.url)
        cancel_subscription_mock.assert_called_once_with(at_period_end=djstripe_settings.CANCELLATION_AT_PERIOD_END)
        self.assertRedirects(response, reverse("djstripe:account"))
        self.assertTrue(self.user.is_authenticated())

    @patch("djstripe.views.auth_logout", autospec=True)
    @patch("stripe.Customer.create", return_value=PropertyMock(id="cus_xxx1234567890"))
    @patch("djstripe.models.Customer.cancel_subscription", return_value=CurrentSubscription(status=CurrentSubscription.STATUS_CANCELLED))
    def test_cancel_no_proration(self, cancel_subscription_mock, stripe_create_customer_mock, logout_mock):
        # Subscription cancelled immediately => the view logs the user out.
        response = self.client.post(self.url)
        cancel_subscription_mock.assert_called_once_with(at_period_end=djstripe_settings.CANCELLATION_AT_PERIOD_END)
        self.assertEqual(response.status_code, 302)
        self.assertTrue(logout_mock.called)
| 54.663912
| 156
| 0.714559
| 2,297
| 19,843
| 5.924249
| 0.088811
| 0.041152
| 0.040491
| 0.05754
| 0.845826
| 0.814374
| 0.798354
| 0.776602
| 0.748016
| 0.72259
| 0
| 0.019866
| 0.167918
| 19,843
| 362
| 157
| 54.814917
| 0.804312
| 0.011541
| 0
| 0.608059
| 0
| 0
| 0.184137
| 0.095282
| 0
| 0
| 0
| 0
| 0.304029
| 1
| 0.131868
| false
| 0.080586
| 0.03663
| 0
| 0.201465
| 0.003663
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
1213cef34f88fa5ed8c027243e1816e03a984ad1
| 18,612
|
py
|
Python
|
SimPEG/EM/FDEM/SrcFDEM.py
|
KyuboNoh/HY
|
8ba9815137c2cff2f1931a1940e1b762e8df0b02
|
[
"MIT"
] | 1
|
2020-11-27T03:26:22.000Z
|
2020-11-27T03:26:22.000Z
|
SimPEG/EM/FDEM/SrcFDEM.py
|
KyuboNoh/HY
|
8ba9815137c2cff2f1931a1940e1b762e8df0b02
|
[
"MIT"
] | null | null | null |
SimPEG/EM/FDEM/SrcFDEM.py
|
KyuboNoh/HY
|
8ba9815137c2cff2f1931a1940e1b762e8df0b02
|
[
"MIT"
] | null | null | null |
from SimPEG import Survey, Problem, Utils, np, sp
from scipy.constants import mu_0
from SimPEG.EM.Utils import *
from SimPEG.Utils import Zero
class BaseSrc(Survey.BaseSrc):
    """
    Base source class for FDEM Survey.

    Defines the source-term interface (S_m, S_e and their derivatives) and
    primary-field hooks; all default to Zero() so subclasses override only
    what they need.
    """

    freq = None  # source frequency; subclasses set this (e.g. RawVec uses float(freq))
    # rxPair = RxFDEM
    integrate = True  # whether source terms should be integrated (see e.g. RawVec.S_m/S_e)

    def eval(self, prob):
        """
        Evaluate the source terms.

        - :math:`S_m` : magnetic source term
        - :math:`S_e` : electric source term

        :param Problem prob: FDEM Problem
        :rtype: (numpy.ndarray, numpy.ndarray)
        :return: tuple with magnetic source term and electric source term
        """
        S_m = self.S_m(prob)
        S_e = self.S_e(prob)
        return S_m, S_e

    def evalDeriv(self, prob, v=None, adjoint=False):
        """
        Derivatives of the source terms with respect to the inversion model.

        - :code:`S_mDeriv` : derivative of the magnetic source term
        - :code:`S_eDeriv` : derivative of the electric source term

        :param Problem prob: FDEM Problem
        :param numpy.ndarray v: vector to take product with
        :param bool adjoint: adjoint?
        :rtype: (numpy.ndarray, numpy.ndarray)
        :return: tuple with magnetic source term and electric source term derivatives times a vector
        """
        if v is not None:
            # Vector supplied: return the products directly.
            return self.S_mDeriv(prob,v,adjoint), self.S_eDeriv(prob,v,adjoint)
        else:
            # No vector: return callables so the caller can apply them later.
            return lambda v: self.S_mDeriv(prob,v,adjoint), lambda v: self.S_eDeriv(prob,v,adjoint)

    def bPrimary(self, prob):
        """
        Primary magnetic flux density. Zero by default; subclasses override.

        :param Problem prob: FDEM Problem
        :rtype: numpy.ndarray
        :return: primary magnetic flux density
        """
        return Zero()

    def hPrimary(self, prob):
        """
        Primary magnetic field. Zero by default; subclasses override.

        :param Problem prob: FDEM Problem
        :rtype: numpy.ndarray
        :return: primary magnetic field
        """
        return Zero()

    def ePrimary(self, prob):
        """
        Primary electric field. Zero by default; subclasses override.

        :param Problem prob: FDEM Problem
        :rtype: numpy.ndarray
        :return: primary electric field
        """
        return Zero()

    def jPrimary(self, prob):
        """
        Primary current density. Zero by default; subclasses override.

        :param Problem prob: FDEM Problem
        :rtype: numpy.ndarray
        :return: primary current density
        """
        return Zero()

    def S_m(self, prob):
        """
        Magnetic source term. Zero by default; subclasses override.

        :param Problem prob: FDEM Problem
        :rtype: numpy.ndarray
        :return: magnetic source term on mesh
        """
        return Zero()

    def S_e(self, prob):
        """
        Electric source term. Zero by default; subclasses override.

        :param Problem prob: FDEM Problem
        :rtype: numpy.ndarray
        :return: electric source term on mesh
        """
        return Zero()

    def S_mDeriv(self, prob, v, adjoint = False):
        """
        Derivative of magnetic source term with respect to the inversion model.
        Zero by default; subclasses with model-dependent sources override.

        :param Problem prob: FDEM Problem
        :param numpy.ndarray v: vector to take product with
        :param bool adjoint: adjoint?
        :rtype: numpy.ndarray
        :return: product of magnetic source term derivative with a vector
        """
        return Zero()

    def S_eDeriv(self, prob, v, adjoint = False):
        """
        Derivative of electric source term with respect to the inversion model.
        Zero by default; subclasses with model-dependent sources override.

        :param Problem prob: FDEM Problem
        :param numpy.ndarray v: vector to take product with
        :param bool adjoint: adjoint?
        :rtype: numpy.ndarray
        :return: product of electric source term derivative with a vector
        """
        return Zero()
class RawVec_e(BaseSrc):
    """
    RawVec electric source, defined by a user-supplied vector S_e.

    :param list rxList: receiver list
    :param float freq: frequency
    :param numpy.array S_e: electric source term
    """

    def __init__(self, rxList, freq, S_e):
        # Store a complex copy so downstream arithmetic is always complex.
        self._S_e = np.array(S_e, dtype=complex)
        self.freq = float(freq)
        BaseSrc.__init__(self, rxList)

    def S_e(self, prob):
        """Return the user-provided electric source term."""
        return self._S_e
class RawVec_m(BaseSrc):
    """
    RawVec magnetic source, defined by a user-supplied vector S_m.

    :param rxList: receiver list
    :param float freq: frequency
    :param numpy.array S_m: magnetic source term
    """

    def __init__(self, rxList, freq, S_m, integrate=True):
        # Store a complex copy so downstream arithmetic is always complex.
        self._S_m = np.array(S_m, dtype=complex)
        self.freq = float(freq)
        self.integrate = integrate
        BaseSrc.__init__(self, rxList)

    def S_m(self, prob):
        """
        Magnetic source term.

        :param Problem prob: FDEM Problem
        :rtype: numpy.ndarray
        :return: magnetic source term on mesh
        """
        return self._S_m
class RawVec(BaseSrc):
    """
    RawVec source, defined by user-supplied vectors S_m and S_e.

    :param rxList: receiver list
    :param float freq: frequency
    :param numpy.array S_m: magnetic source term
    :param numpy.array S_e: electric source term
    :param bool integrate: if True, multiply the source term by prob.Me
        (presumably the edge mass matrix -- confirm against the Problem class)
        depending on the problem's equation locations
    """

    def __init__(self, rxList, freq, S_m, S_e, integrate=True):
        # Store complex copies so downstream arithmetic is always complex.
        self._S_m = np.array(S_m, dtype=complex)
        self._S_e = np.array(S_e, dtype=complex)
        self.freq = float(freq)
        self.integrate = integrate
        BaseSrc.__init__(self, rxList)

    def S_m(self, prob):
        """
        Magnetic source term (integrated with prob.Me for 'EF' problems).

        :param Problem prob: FDEM Problem
        :rtype: numpy.ndarray
        :return: magnetic source term on mesh
        """
        # BUG FIX: the original compared strings with `is`, which tests object
        # identity and only works by accident of CPython string interning
        # (SyntaxWarning on Python 3.8+). Use equality instead.
        if prob._eqLocs == 'EF' and self.integrate is True:
            return prob.Me * self._S_m
        return self._S_m

    def S_e(self, prob):
        """
        Electric source term (integrated with prob.Me for 'FE' problems).

        :param Problem prob: FDEM Problem
        :rtype: numpy.ndarray
        :return: electric source term on mesh
        """
        # BUG FIX: `is` -> `==` for string comparison (see S_m above... same
        # identity-comparison defect, fixed independently here).
        if prob._eqLocs == 'FE' and self.integrate is True:
            return prob.Me * self._S_e
        return self._S_e
class MagDipole(BaseSrc):
    r"""
    Point magnetic dipole source calculated by taking the curl of a magnetic
    vector potential. By taking the discrete curl, we ensure that the magnetic
    flux density is divergence free (no magnetic monopoles!).

    This approach uses a primary-secondary in frequency. Here we show the
    derivation for E-B formulation noting that similar steps are followed for
    the H-J formulation.

    .. math::
        \mathbf{C} \mathbf{e} + i \omega \mathbf{b} = \mathbf{s_m} \\
        {\mathbf{C}^T \mathbf{M_{\mu^{-1}}^f} \mathbf{b} - \mathbf{M_{\sigma}^e} \mathbf{e} = \mathbf{s_e}}

    We split up the fields and :math:`\mu^{-1}` into primary
    (:math:`\mathbf{P}`) and secondary (:math:`\mathbf{S}`) components

    - :math:`\mathbf{e} = \mathbf{e^P} + \mathbf{e^S}`
    - :math:`\mathbf{b} = \mathbf{b^P} + \mathbf{b^S}`
    - :math:`\boldsymbol{\mu}^{\mathbf{-1}} = \boldsymbol{\mu}^{\mathbf{-1}^\mathbf{P}} + \boldsymbol{\mu}^{\mathbf{-1}^\mathbf{S}}`

    and define a zero-frequency primary problem, noting that the source is
    generated by a divergence free electric current

    .. math::
        \mathbf{C} \mathbf{e^P} = \mathbf{s_m^P} = 0 \\
        {\mathbf{C}^T \mathbf{{M_{\mu^{-1}}^f}^P} \mathbf{b^P} - \mathbf{M_{\sigma}^e} \mathbf{e^P} = \mathbf{M^e} \mathbf{s_e^P}}

    Since :math:`\mathbf{e^P}` is curl-free, divergence-free, we assume that
    there is no constant field background, the :math:`\mathbf{e^P} = 0`, so
    our primary problem is

    .. math::
        \mathbf{e^P} = 0 \\
        {\mathbf{C}^T \mathbf{{M_{\mu^{-1}}^f}^P} \mathbf{b^P} = \mathbf{s_e^P}}

    Our secondary problem is then

    .. math::
        \mathbf{C} \mathbf{e^S} + i \omega \mathbf{b^S} = - i \omega \mathbf{b^P} \\
        {\mathbf{C}^T \mathbf{M_{\mu^{-1}}^f} \mathbf{b^S} - \mathbf{M_{\sigma}^e} \mathbf{e^S} = -\mathbf{C}^T \mathbf{{M_{\mu^{-1}}^f}^S} \mathbf{b^P}}

    :param list rxList: receiver list
    :param float freq: frequency
    :param numpy.ndarray loc: source location (ie: :code:`np.r_[xloc,yloc,zloc]`)
    :param string orientation: 'X', 'Y', 'Z'
    :param float moment: magnetic dipole moment
    :param float mu: background magnetic permeability
    """

    # TODO: right now, orientation doesn't actually do anything! The methods
    # in SrcUtils should take care of that
    def __init__(self, rxList, freq, loc, orientation='Z', moment=1., mu=mu_0):
        self.freq = float(freq)
        self.loc = loc
        self.orientation = orientation
        self.moment = moment
        self.mu = mu
        self.integrate = False
        BaseSrc.__init__(self, rxList)

    def bPrimary(self, prob):
        """
        The primary magnetic flux density from a magnetic vector potential.

        :param Problem prob: FDEM problem
        :rtype: numpy.ndarray
        :return: primary magnetic flux density
        """
        # BUGFIX: compare strings with '==' rather than 'is' (identity
        # comparison only worked by accident of CPython string interning).
        eqLocs = prob._eqLocs
        if eqLocs == 'FE':
            gridX = prob.mesh.gridEx
            gridY = prob.mesh.gridEy
            gridZ = prob.mesh.gridEz
            C = prob.mesh.edgeCurl
        elif eqLocs == 'EF':
            gridX = prob.mesh.gridFx
            gridY = prob.mesh.gridFy
            gridZ = prob.mesh.gridFz
            C = prob.mesh.edgeCurl.T
        else:
            # Previously an unrecognized value fell through to an
            # UnboundLocalError; fail explicitly instead.
            raise NotImplementedError(
                'Unknown equation locations: {!r}'.format(eqLocs)
            )

        if prob.mesh._meshType == 'CYL':
            if not prob.mesh.isSymmetric:
                # TODO ?
                raise NotImplementedError('Non-symmetric cyl mesh not implemented yet!')
            a = MagneticDipoleVectorPotential(self.loc, gridY, 'y', mu=self.mu, moment=self.moment)
        else:
            srcfct = MagneticDipoleVectorPotential
            ax = srcfct(self.loc, gridX, 'x', mu=self.mu, moment=self.moment)
            ay = srcfct(self.loc, gridY, 'y', mu=self.mu, moment=self.moment)
            az = srcfct(self.loc, gridZ, 'z', mu=self.mu, moment=self.moment)
            a = np.concatenate((ax, ay, az))

        return C*a

    def hPrimary(self, prob):
        """
        The primary magnetic field from a magnetic vector potential.

        :param Problem prob: FDEM problem
        :rtype: numpy.ndarray
        :return: primary magnetic field
        """
        b = self.bPrimary(prob)
        return h_from_b(prob, b)

    def S_m(self, prob):
        """
        The magnetic source term.

        :param Problem prob: FDEM problem
        :rtype: numpy.ndarray
        :return: magnetic source term
        """
        b_p = self.bPrimary(prob)
        return -1j*omega(self.freq)*b_p

    def S_e(self, prob):
        """
        The electric source term.

        :param Problem prob: FDEM problem
        :rtype: numpy.ndarray
        :return: electric source term
        """
        # When the model permeability equals the primary permeability there is
        # no secondary source contribution.
        if all(np.r_[self.mu] == np.r_[prob.curModel.mu]):
            return Zero()
        else:
            # BUGFIX: '==' instead of 'is' for the string comparisons.
            eqLocs = prob._eqLocs
            if eqLocs == 'FE':
                mui_s = prob.curModel.mui - 1./self.mu
                MMui_s = prob.mesh.getFaceInnerProduct(mui_s)
                C = prob.mesh.edgeCurl
            elif eqLocs == 'EF':
                mu_s = prob.curModel.mu - self.mu
                MMui_s = prob.mesh.getEdgeInnerProduct(mu_s, invMat=True)
                C = prob.mesh.edgeCurl.T
            else:
                raise NotImplementedError(
                    'Unknown equation locations: {!r}'.format(eqLocs)
                )
            return -C.T * (MMui_s * self.bPrimary(prob))
class MagDipole_Bfield(BaseSrc):
    """
    Point magnetic dipole source calculated with the analytic solution for the
    fields from a magnetic dipole. No discrete curl is taken, so the magnetic
    flux density may not be strictly divergence free.

    This approach uses a primary-secondary in frequency in the same fashion as
    the MagDipole.

    :param list rxList: receiver list
    :param float freq: frequency
    :param numpy.ndarray loc: source location (ie: :code:`np.r_[xloc,yloc,zloc]`)
    :param string orientation: 'X', 'Y', 'Z'
    :param float moment: magnetic dipole moment
    :param float mu: background magnetic permeability
    """

    # TODO: right now, orientation doesn't actually do anything! The methods
    # in SrcUtils should take care of that
    # TODO: neither does moment
    def __init__(self, rxList, freq, loc, orientation='Z', moment=1., mu=mu_0):
        self.freq = float(freq)
        self.loc = loc
        self.orientation = orientation
        self.moment = moment
        self.mu = mu
        BaseSrc.__init__(self, rxList)

    def bPrimary(self, prob):
        """
        The primary magnetic flux density from the analytic solution for
        magnetic fields from a dipole.

        :param Problem prob: FDEM problem
        :rtype: numpy.ndarray
        :return: primary magnetic flux density
        """
        # BUGFIX: compare strings with '==' rather than 'is' (identity
        # comparison only worked by accident of CPython string interning).
        eqLocs = prob._eqLocs
        if eqLocs == 'FE':
            gridX = prob.mesh.gridFx
            gridY = prob.mesh.gridFy
            gridZ = prob.mesh.gridFz
            C = prob.mesh.edgeCurl
        elif eqLocs == 'EF':
            gridX = prob.mesh.gridEx
            gridY = prob.mesh.gridEy
            gridZ = prob.mesh.gridEz
            C = prob.mesh.edgeCurl.T
        else:
            # Previously an unrecognized value fell through to an
            # UnboundLocalError; fail explicitly instead.
            raise NotImplementedError(
                'Unknown equation locations: {!r}'.format(eqLocs)
            )

        srcfct = MagneticDipoleFields
        if prob.mesh._meshType == 'CYL':
            if not prob.mesh.isSymmetric:
                # TODO ?
                raise NotImplementedError('Non-symmetric cyl mesh not implemented yet!')
            bx = srcfct(self.loc, gridX, 'x', mu=self.mu, moment=self.moment)
            bz = srcfct(self.loc, gridZ, 'z', mu=self.mu, moment=self.moment)
            b = np.concatenate((bx,bz))
        else:
            bx = srcfct(self.loc, gridX, 'x', mu=self.mu, moment=self.moment)
            by = srcfct(self.loc, gridY, 'y', mu=self.mu, moment=self.moment)
            bz = srcfct(self.loc, gridZ, 'z', mu=self.mu, moment=self.moment)
            b = np.concatenate((bx,by,bz))
        return b

    def hPrimary(self, prob):
        """
        The primary magnetic field from the analytic dipole solution.

        :param Problem prob: FDEM problem
        :rtype: numpy.ndarray
        :return: primary magnetic field
        """
        b = self.bPrimary(prob)
        return h_from_b(prob, b)

    def S_m(self, prob):
        """
        The magnetic source term.

        :param Problem prob: FDEM problem
        :rtype: numpy.ndarray
        :return: magnetic source term
        """
        b = self.bPrimary(prob)
        return -1j*omega(self.freq)*b

    def S_e(self, prob):
        """
        The electric source term.

        :param Problem prob: FDEM problem
        :rtype: numpy.ndarray
        :return: electric source term
        """
        # When the model permeability equals the primary permeability there is
        # no secondary source contribution.
        if all(np.r_[self.mu] == np.r_[prob.curModel.mu]):
            return Zero()
        else:
            # BUGFIX: '==' instead of 'is' for the string comparisons.
            eqLocs = prob._eqLocs
            if eqLocs == 'FE':
                mui_s = prob.curModel.mui - 1./self.mu
                MMui_s = prob.mesh.getFaceInnerProduct(mui_s)
                C = prob.mesh.edgeCurl
            elif eqLocs == 'EF':
                mu_s = prob.curModel.mu - self.mu
                MMui_s = prob.mesh.getEdgeInnerProduct(mu_s, invMat=True)
                C = prob.mesh.edgeCurl.T
            else:
                raise NotImplementedError(
                    'Unknown equation locations: {!r}'.format(eqLocs)
                )
            return -C.T * (MMui_s * self.bPrimary(prob))
class CircularLoop(BaseSrc):
    """
    Circular loop magnetic source calculated by taking the curl of a magnetic
    vector potential. By taking the discrete curl, we ensure that the magnetic
    flux density is divergence free (no magnetic monopoles!).

    This approach uses a primary-secondary in frequency in the same fashion as
    the MagDipole.

    :param list rxList: receiver list
    :param float freq: frequency
    :param numpy.ndarray loc: source location (ie: :code:`np.r_[xloc,yloc,zloc]`)
    :param string orientation: 'X', 'Y', 'Z'
    :param float radius: radius of the loop
    :param float mu: background magnetic permeability
    """

    # TODO: right now, orientation doesn't actually do anything! The methods
    # in SrcUtils should take care of that
    def __init__(self, rxList, freq, loc, orientation='Z', radius=1., mu=mu_0):
        self.freq = float(freq)
        self.orientation = orientation
        self.radius = radius
        self.mu = mu
        self.loc = loc
        self.integrate = False
        BaseSrc.__init__(self, rxList)

    def bPrimary(self, prob):
        """
        The primary magnetic flux density from a magnetic vector potential.

        :param Problem prob: FDEM problem
        :rtype: numpy.ndarray
        :return: primary magnetic flux density
        """
        # BUGFIX: compare strings with '==' rather than 'is' (identity
        # comparison only worked by accident of CPython string interning).
        eqLocs = prob._eqLocs
        if eqLocs == 'FE':
            gridX = prob.mesh.gridEx
            gridY = prob.mesh.gridEy
            gridZ = prob.mesh.gridEz
            C = prob.mesh.edgeCurl
        elif eqLocs == 'EF':
            gridX = prob.mesh.gridFx
            gridY = prob.mesh.gridFy
            gridZ = prob.mesh.gridFz
            C = prob.mesh.edgeCurl.T
        else:
            # Previously an unrecognized value fell through to an
            # UnboundLocalError; fail explicitly instead.
            raise NotImplementedError(
                'Unknown equation locations: {!r}'.format(eqLocs)
            )

        if prob.mesh._meshType == 'CYL':
            if not prob.mesh.isSymmetric:
                # TODO ?
                raise NotImplementedError('Non-symmetric cyl mesh not implemented yet!')
            a = MagneticDipoleVectorPotential(self.loc, gridY, 'y', moment=self.radius, mu=self.mu)
        else:
            srcfct = MagneticDipoleVectorPotential
            ax = srcfct(self.loc, gridX, 'x', self.radius, mu=self.mu)
            ay = srcfct(self.loc, gridY, 'y', self.radius, mu=self.mu)
            az = srcfct(self.loc, gridZ, 'z', self.radius, mu=self.mu)
            a = np.concatenate((ax, ay, az))

        return C*a

    def hPrimary(self, prob):
        """
        The primary magnetic field from a magnetic vector potential.

        :param Problem prob: FDEM problem
        :rtype: numpy.ndarray
        :return: primary magnetic field
        """
        b = self.bPrimary(prob)
        return 1./self.mu*b

    def S_m(self, prob):
        """
        The magnetic source term.

        :param Problem prob: FDEM problem
        :rtype: numpy.ndarray
        :return: magnetic source term
        """
        b = self.bPrimary(prob)
        return -1j*omega(self.freq)*b

    def S_e(self, prob):
        """
        The electric source term.

        :param Problem prob: FDEM problem
        :rtype: numpy.ndarray
        :return: electric source term
        """
        # When the model permeability equals the primary permeability there is
        # no secondary source contribution.
        if all(np.r_[self.mu] == np.r_[prob.curModel.mu]):
            return Zero()
        else:
            # BUGFIX: '==' instead of 'is' for the string comparisons.
            eqLocs = prob._eqLocs
            if eqLocs == 'FE':
                mui_s = prob.curModel.mui - 1./self.mu
                MMui_s = prob.mesh.getFaceInnerProduct(mui_s)
                C = prob.mesh.edgeCurl
            elif eqLocs == 'EF':
                mu_s = prob.curModel.mu - self.mu
                MMui_s = prob.mesh.getEdgeInnerProduct(mu_s, invMat=True)
                C = prob.mesh.edgeCurl.T
            else:
                raise NotImplementedError(
                    'Unknown equation locations: {!r}'.format(eqLocs)
                )
            return -C.T * (MMui_s * self.bPrimary(prob))
| 32.538462
| 172
| 0.589781
| 2,390
| 18,612
| 4.517992
| 0.1
| 0.031117
| 0.03408
| 0.0426
| 0.823023
| 0.787738
| 0.755232
| 0.74912
| 0.729024
| 0.708557
| 0
| 0.002006
| 0.30346
| 18,612
| 571
| 173
| 32.595447
| 0.830916
| 0.431496
| 0
| 0.77169
| 0
| 0
| 0.020102
| 0
| 0
| 0
| 0
| 0.010508
| 0
| 1
| 0.146119
| false
| 0
| 0.018265
| 0.004566
| 0.351598
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
126301cdd4a18fe2e1448040805da7cca6acfdbb
| 118,789
|
py
|
Python
|
fhir/resources/STU3/tests/test_questionnaireresponse.py
|
mmabey/fhir.resources
|
cc73718e9762c04726cd7de240c8f2dd5313cbe1
|
[
"BSD-3-Clause"
] | null | null | null |
fhir/resources/STU3/tests/test_questionnaireresponse.py
|
mmabey/fhir.resources
|
cc73718e9762c04726cd7de240c8f2dd5313cbe1
|
[
"BSD-3-Clause"
] | null | null | null |
fhir/resources/STU3/tests/test_questionnaireresponse.py
|
mmabey/fhir.resources
|
cc73718e9762c04726cd7de240c8f2dd5313cbe1
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Profile: http://hl7.org/fhir/StructureDefinition/QuestionnaireResponse
Release: STU3
Version: 3.0.2
Revision: 11917
Last updated: 2019-10-24T11:53:00+11:00
"""
import io
import json
import os
import unittest
import pytest
from .. import questionnaireresponse
from ..fhirdate import FHIRDate
from .fixtures import force_bytes
@pytest.mark.usefixtures("base_settings")
class QuestionnaireResponseTests(unittest.TestCase):
def instantiate_from(self, filename):
datadir = os.environ.get("FHIR_UNITTEST_DATADIR") or ""
with io.open(os.path.join(datadir, filename), "r", encoding="utf-8") as handle:
js = json.load(handle)
self.assertEqual("QuestionnaireResponse", js["resourceType"])
return questionnaireresponse.QuestionnaireResponse(js)
def testQuestionnaireResponse1(self):
inst = self.instantiate_from("questionnaireresponse-example.json")
self.assertIsNotNone(
inst, "Must have instantiated a QuestionnaireResponse instance"
)
self.implQuestionnaireResponse1(inst)
js = inst.as_json()
self.assertEqual("QuestionnaireResponse", js["resourceType"])
inst2 = questionnaireresponse.QuestionnaireResponse(js)
self.implQuestionnaireResponse1(inst2)
    def implQuestionnaireResponse1(self, inst):
        # Field-by-field assertions for questionnaireresponse-example.json;
        # called both on the freshly-parsed instance and on the re-parsed
        # round-trip copy.
        self.assertEqual(inst.authored.date, FHIRDate("2013-02-19T14:15:00-05:00").date)
        self.assertEqual(inst.authored.as_json(), "2013-02-19T14:15:00-05:00")
        # Contained resources referenced by the response.
        self.assertEqual(force_bytes(inst.contained[0].id), force_bytes("patsub"))
        self.assertEqual(force_bytes(inst.contained[1].id), force_bytes("order"))
        self.assertEqual(force_bytes(inst.contained[2].id), force_bytes("questauth"))
        self.assertEqual(force_bytes(inst.id), force_bytes("3141"))
        self.assertEqual(
            force_bytes(inst.identifier.system),
            force_bytes("http://example.org/fhir/NamingSystem/questionnaire-ids"),
        )
        self.assertEqual(force_bytes(inst.identifier.value), force_bytes("Q12349876"))
        # Nested answer group: item 1 -> item 1.1 -> answer -> group 1.1.1.
        self.assertEqual(
            force_bytes(
                inst.item[0]
                .item[0]
                .answer[0]
                .item[0]
                .item[0]
                .answer[0]
                .valueCoding.code
            ),
            force_bytes("1"),
        )
        self.assertEqual(
            force_bytes(
                inst.item[0]
                .item[0]
                .answer[0]
                .item[0]
                .item[0]
                .answer[0]
                .valueCoding.system
            ),
            force_bytes("http://cancer.questionnaire.org/system/code/yesno"),
        )
        self.assertEqual(
            force_bytes(inst.item[0].item[0].answer[0].item[0].item[0].linkId),
            force_bytes("1.1.1.1"),
        )
        self.assertEqual(
            force_bytes(
                inst.item[0]
                .item[0]
                .answer[0]
                .item[0]
                .item[1]
                .answer[0]
                .valueCoding.code
            ),
            force_bytes("1"),
        )
        self.assertEqual(
            force_bytes(
                inst.item[0]
                .item[0]
                .answer[0]
                .item[0]
                .item[1]
                .answer[0]
                .valueCoding.system
            ),
            force_bytes("http://cancer.questionnaire.org/system/code/yesno"),
        )
        self.assertEqual(
            force_bytes(inst.item[0].item[0].answer[0].item[0].item[1].linkId),
            force_bytes("1.1.1.2"),
        )
        self.assertEqual(
            force_bytes(
                inst.item[0]
                .item[0]
                .answer[0]
                .item[0]
                .item[2]
                .answer[0]
                .valueCoding.code
            ),
            force_bytes("0"),
        )
        self.assertEqual(
            force_bytes(
                inst.item[0]
                .item[0]
                .answer[0]
                .item[0]
                .item[2]
                .answer[0]
                .valueCoding.system
            ),
            force_bytes("http://cancer.questionnaire.org/system/code/yesno"),
        )
        self.assertEqual(
            force_bytes(inst.item[0].item[0].answer[0].item[0].item[2].linkId),
            force_bytes("1.1.1.3"),
        )
        self.assertEqual(
            force_bytes(inst.item[0].item[0].answer[0].item[0].linkId),
            force_bytes("1.1.1"),
        )
        # Top-level answer on item 1.1.
        self.assertEqual(
            force_bytes(inst.item[0].item[0].answer[0].valueCoding.code),
            force_bytes("1"),
        )
        self.assertEqual(
            force_bytes(inst.item[0].item[0].answer[0].valueCoding.display),
            force_bytes("Yes"),
        )
        self.assertEqual(
            force_bytes(inst.item[0].item[0].answer[0].valueCoding.system),
            force_bytes("http://cancer.questionnaire.org/system/code/yesno"),
        )
        self.assertEqual(force_bytes(inst.item[0].item[0].linkId), force_bytes("1.1"))
        self.assertEqual(force_bytes(inst.item[0].linkId), force_bytes("1"))
        # Resource-level status fields.
        self.assertEqual(force_bytes(inst.status), force_bytes("completed"))
        self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
def testQuestionnaireResponse2(self):
inst = self.instantiate_from(
"questionnaireresponse-example-ussg-fht-answers.json"
)
self.assertIsNotNone(
inst, "Must have instantiated a QuestionnaireResponse instance"
)
self.implQuestionnaireResponse2(inst)
js = inst.as_json()
self.assertEqual("QuestionnaireResponse", js["resourceType"])
inst2 = questionnaireresponse.QuestionnaireResponse(js)
self.implQuestionnaireResponse2(inst2)
def implQuestionnaireResponse2(self, inst):
self.assertEqual(inst.authored.date, FHIRDate("2008-01-17").date)
self.assertEqual(inst.authored.as_json(), "2008-01-17")
self.assertEqual(force_bytes(inst.id), force_bytes("ussg-fht-answers"))
self.assertEqual(
inst.item[0].item[0].answer[0].valueDate.date, FHIRDate("2008-01-17").date
)
self.assertEqual(
inst.item[0].item[0].answer[0].valueDate.as_json(), "2008-01-17"
)
self.assertEqual(force_bytes(inst.item[0].item[0].linkId), force_bytes("0.1"))
self.assertEqual(
force_bytes(inst.item[0].item[0].text), force_bytes("Date Done")
)
self.assertEqual(force_bytes(inst.item[0].linkId), force_bytes("0"))
self.assertEqual(
force_bytes(inst.item[1].definition),
force_bytes("http://loinc.org/fhir/DataElement/54126-8"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[0].answer[0].valueString),
force_bytes("Annie Proband"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[0].definition),
force_bytes("http://loinc.org/fhir/DataElement/54125-0"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[0].linkId), force_bytes("1.1.1")
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[0].text), force_bytes("Name")
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[1].answer[0].valueCoding.code),
force_bytes("LA3-6"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[1].answer[0].valueCoding.display),
force_bytes("Female"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[1].answer[0].valueCoding.system),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[1].definition),
force_bytes("http://loinc.org/fhir/DataElement/54131-8"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[1].linkId), force_bytes("1.1.2")
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[1].text), force_bytes("Gender")
)
self.assertEqual(
inst.item[1].item[0].item[2].answer[0].valueDate.date,
FHIRDate("1966-04-04").date,
)
self.assertEqual(
inst.item[1].item[0].item[2].answer[0].valueDate.as_json(), "1966-04-04"
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[2].definition),
force_bytes("http://loinc.org/fhir/DataElement/21112-8"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[2].linkId), force_bytes("1.1.3")
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[2].text), force_bytes("Date of Birth")
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[3].answer[0].valueCoding.code),
force_bytes("LA32-8"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[3].answer[0].valueCoding.display),
force_bytes("No"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[3].answer[0].valueCoding.system),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[3].definition),
force_bytes("http://loinc.org/fhir/DataElement/54132-6"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[3].linkId), force_bytes("1.1.4")
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[3].text),
force_bytes("Were you born a twin?"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[4].answer[0].valueCoding.code),
force_bytes("LA32-8"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[4].answer[0].valueCoding.display),
force_bytes("No"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[4].answer[0].valueCoding.system),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[4].definition),
force_bytes("http://loinc.org/fhir/DataElement/54128-4"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[4].linkId), force_bytes("1.1.5")
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[4].text),
force_bytes("Were you adopted?"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[5].answer[0].valueCoding.code),
force_bytes("LA32-8"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[5].answer[0].valueCoding.display),
force_bytes("No"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[5].answer[0].valueCoding.system),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[5].definition),
force_bytes("http://loinc.org/fhir/DataElement/54135-9"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[5].linkId), force_bytes("1.1.6")
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[5].text),
force_bytes(
"Are your parents related to each other in any way other than marriage?"
),
)
self.assertEqual(
force_bytes(
inst.item[1]
.item[0]
.item[6]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.code
),
force_bytes("[in_i]"),
)
self.assertEqual(
force_bytes(
inst.item[1]
.item[0]
.item[6]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.display
),
force_bytes("inches"),
)
self.assertEqual(
force_bytes(
inst.item[1]
.item[0]
.item[6]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.system
),
force_bytes("http://unitsofmeasure.org"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[6].answer[0].item[0].item[0].linkId),
force_bytes("1.1.7.1.1"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[6].answer[0].item[0].item[0].text),
force_bytes("Units"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[6].answer[0].item[0].linkId),
force_bytes("1.1.7.1"),
)
self.assertEqual(inst.item[1].item[0].item[6].answer[0].valueDecimal, 63)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[6].definition),
force_bytes("http://loinc.org/fhir/DataElement/8302-2"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[6].linkId), force_bytes("1.1.7")
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[6].text), force_bytes("Height")
)
self.assertEqual(
force_bytes(
inst.item[1]
.item[0]
.item[7]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.code
),
force_bytes("lb"),
)
self.assertEqual(
force_bytes(
inst.item[1]
.item[0]
.item[7]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.display
),
force_bytes("pounds"),
)
self.assertEqual(
force_bytes(
inst.item[1]
.item[0]
.item[7]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.system
),
force_bytes("http://unitsofmeasure.org"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[7].answer[0].item[0].item[0].linkId),
force_bytes("1.1.8.1.1"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[7].answer[0].item[0].item[0].text),
force_bytes("Units"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[7].answer[0].item[0].linkId),
force_bytes("1.1.8.1"),
)
self.assertEqual(inst.item[1].item[0].item[7].answer[0].valueDecimal, 127)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[7].definition),
force_bytes("http://loinc.org/fhir/DataElement/29463-7"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[7].linkId), force_bytes("1.1.8")
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[7].text), force_bytes("Weight")
)
self.assertEqual(inst.item[1].item[0].item[8].answer[0].valueDecimal, 22.5)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[8].definition),
force_bytes("http://loinc.org/fhir/DataElement/39156-5"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[8].linkId), force_bytes("1.1.9")
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[8].text),
force_bytes("Body mass index (BMI) [Ratio]"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[9].answer[0].valueCoding.code),
force_bytes("LA4457-3"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[9].answer[0].valueCoding.display),
force_bytes("White"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[9].answer[0].valueCoding.system),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[9].definition),
force_bytes("http://loinc.org/fhir/DataElement/54134-2"),
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[9].linkId), force_bytes("1.1.10")
)
self.assertEqual(
force_bytes(inst.item[1].item[0].item[9].text), force_bytes("Race")
)
self.assertEqual(force_bytes(inst.item[1].item[0].linkId), force_bytes("1.1"))
self.assertEqual(force_bytes(inst.item[1].linkId), force_bytes("1"))
self.assertEqual(
force_bytes(inst.item[1].text), force_bytes("Your health information")
)
self.assertEqual(
force_bytes(inst.item[2].definition),
force_bytes("http://loinc.org/fhir/DataElement/54114-4"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[0].item[0].item[0].answer[0].valueCoding.code
),
force_bytes("LA10405-1"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[0].item[0].item[0].answer[0].valueCoding.display
),
force_bytes("Daughter"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[0].item[0].item[0].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[0].item[0].item[0].definition),
force_bytes("http://loinc.org/fhir/DataElement/54136-7"),
)
self.assertEqual(
force_bytes(inst.item[2].item[0].item[0].item[0].linkId),
force_bytes("2.1.1.1"),
)
self.assertEqual(
force_bytes(inst.item[2].item[0].item[0].item[0].text),
force_bytes("Relationship to you"),
)
self.assertEqual(
force_bytes(inst.item[2].item[0].item[0].item[1].answer[0].valueString),
force_bytes("Susan"),
)
self.assertEqual(
force_bytes(inst.item[2].item[0].item[0].item[1].definition),
force_bytes("http://loinc.org/fhir/DataElement/54138-3"),
)
self.assertEqual(
force_bytes(inst.item[2].item[0].item[0].item[1].linkId),
force_bytes("2.1.1.2"),
)
self.assertEqual(
force_bytes(inst.item[2].item[0].item[0].item[1].text), force_bytes("Name")
)
self.assertEqual(
force_bytes(
inst.item[2].item[0].item[0].item[2].answer[0].valueCoding.code
),
force_bytes("LA3-6"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[0].item[0].item[2].answer[0].valueCoding.display
),
force_bytes("Female"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[0].item[0].item[2].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[0].item[0].item[2].definition),
force_bytes("http://loinc.org/fhir/DataElement/54123-5"),
)
self.assertEqual(
force_bytes(inst.item[2].item[0].item[0].item[2].linkId),
force_bytes("2.1.1.3"),
)
self.assertEqual(
force_bytes(inst.item[2].item[0].item[0].item[2].text),
force_bytes("Gender"),
)
self.assertEqual(
inst.item[2]
.item[0]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueDecimal,
17,
)
self.assertEqual(
force_bytes(
inst.item[2]
.item[0]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.definition
),
force_bytes("http://loinc.org/fhir/DataElement/54141-7"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[0].item[0].item[3].answer[0].item[0].item[0].linkId
),
force_bytes("2.1.1.4.2.2"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[0].item[0].item[3].answer[0].item[0].item[0].text
),
force_bytes("Age"),
)
self.assertEqual(
force_bytes(inst.item[2].item[0].item[0].item[3].answer[0].item[0].linkId),
force_bytes("2.1.1.4.2"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[0].item[0].item[3].answer[0].valueCoding.code
),
force_bytes("LA33-6"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[0].item[0].item[3].answer[0].valueCoding.display
),
force_bytes("Yes"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[0].item[0].item[3].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[0].item[0].item[3].definition),
force_bytes("http://loinc.org/fhir/DataElement/54139-1"),
)
self.assertEqual(
force_bytes(inst.item[2].item[0].item[0].item[3].linkId),
force_bytes("2.1.1.4"),
)
self.assertEqual(
force_bytes(inst.item[2].item[0].item[0].item[3].text),
force_bytes("Living?"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[0].item[0].item[4].answer[0].valueCoding.code
),
force_bytes("LA32-8"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[0].item[0].item[4].answer[0].valueCoding.display
),
force_bytes("No"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[0].item[0].item[4].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[0].item[0].item[4].definition),
force_bytes("http://loinc.org/fhir/DataElement/54121-9"),
)
self.assertEqual(
force_bytes(inst.item[2].item[0].item[0].item[4].linkId),
force_bytes("2.1.1.5"),
)
self.assertEqual(
force_bytes(inst.item[2].item[0].item[0].item[4].text),
force_bytes("Was this person born a twin?"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[0].item[0].item[5].answer[0].valueCoding.code
),
force_bytes("LA32-8"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[0].item[0].item[5].answer[0].valueCoding.display
),
force_bytes("No"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[0].item[0].item[5].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[0].item[0].item[5].definition),
force_bytes("http://loinc.org/fhir/DataElement/54122-7"),
)
self.assertEqual(
force_bytes(inst.item[2].item[0].item[0].item[5].linkId),
force_bytes("2.1.1.6"),
)
self.assertEqual(
force_bytes(inst.item[2].item[0].item[0].item[5].text),
force_bytes("Was this person adopted?"),
)
self.assertEqual(
force_bytes(inst.item[2].item[0].item[0].linkId), force_bytes("2.1.1")
)
self.assertEqual(force_bytes(inst.item[2].item[0].linkId), force_bytes("2.1"))
self.assertEqual(
force_bytes(
inst.item[2].item[1].item[0].item[0].answer[0].valueCoding.code
),
force_bytes("LA10415-0"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[1].item[0].item[0].answer[0].valueCoding.display
),
force_bytes("Brother"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[1].item[0].item[0].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[1].item[0].item[0].definition),
force_bytes("http://loinc.org/fhir/DataElement/54136-7"),
)
self.assertEqual(
force_bytes(inst.item[2].item[1].item[0].item[0].linkId),
force_bytes("2.1.1.1"),
)
self.assertEqual(
force_bytes(inst.item[2].item[1].item[0].item[0].text),
force_bytes("Relationship to you"),
)
self.assertEqual(
force_bytes(inst.item[2].item[1].item[0].item[1].answer[0].valueString),
force_bytes("Brian"),
)
self.assertEqual(
force_bytes(inst.item[2].item[1].item[0].item[1].definition),
force_bytes("http://loinc.org/fhir/DataElement/54138-3"),
)
self.assertEqual(
force_bytes(inst.item[2].item[1].item[0].item[1].linkId),
force_bytes("2.1.1.2"),
)
self.assertEqual(
force_bytes(inst.item[2].item[1].item[0].item[1].text), force_bytes("Name")
)
self.assertEqual(
force_bytes(
inst.item[2].item[1].item[0].item[2].answer[0].valueCoding.code
),
force_bytes("LA2-8"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[1].item[0].item[2].answer[0].valueCoding.display
),
force_bytes("Male"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[1].item[0].item[2].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[1].item[0].item[2].definition),
force_bytes("http://loinc.org/fhir/DataElement/54123-5"),
)
self.assertEqual(
force_bytes(inst.item[2].item[1].item[0].item[2].linkId),
force_bytes("2.1.1.3"),
)
self.assertEqual(
force_bytes(inst.item[2].item[1].item[0].item[2].text),
force_bytes("Gender"),
)
self.assertEqual(
inst.item[2]
.item[1]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueDecimal,
32,
)
self.assertEqual(
force_bytes(
inst.item[2]
.item[1]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.definition
),
force_bytes("http://loinc.org/fhir/DataElement/54141-7"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[1].item[0].item[3].answer[0].item[0].item[0].linkId
),
force_bytes("2.1.1.4.2.2"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[1].item[0].item[3].answer[0].item[0].item[0].text
),
force_bytes("Age"),
)
self.assertEqual(
force_bytes(inst.item[2].item[1].item[0].item[3].answer[0].item[0].linkId),
force_bytes("2.1.1.4.2"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[1].item[0].item[3].answer[0].valueCoding.code
),
force_bytes("LA33-6"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[1].item[0].item[3].answer[0].valueCoding.display
),
force_bytes("Yes"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[1].item[0].item[3].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[1].item[0].item[3].definition),
force_bytes("http://loinc.org/fhir/DataElement/54139-1"),
)
self.assertEqual(
force_bytes(inst.item[2].item[1].item[0].item[3].linkId),
force_bytes("2.1.1.4"),
)
self.assertEqual(
force_bytes(inst.item[2].item[1].item[0].item[3].text),
force_bytes("Living?"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[1].item[0].item[4].answer[0].valueCoding.code
),
force_bytes("LA32-8"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[1].item[0].item[4].answer[0].valueCoding.display
),
force_bytes("No"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[1].item[0].item[4].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[1].item[0].item[4].definition),
force_bytes("http://loinc.org/fhir/DataElement/54121-9"),
)
self.assertEqual(
force_bytes(inst.item[2].item[1].item[0].item[4].linkId),
force_bytes("2.1.1.5"),
)
self.assertEqual(
force_bytes(inst.item[2].item[1].item[0].item[4].text),
force_bytes("Was this person born a twin?"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[1].item[0].item[5].answer[0].valueCoding.code
),
force_bytes("LA32-8"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[1].item[0].item[5].answer[0].valueCoding.display
),
force_bytes("No"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[1].item[0].item[5].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[1].item[0].item[5].definition),
force_bytes("http://loinc.org/fhir/DataElement/54122-7"),
)
self.assertEqual(
force_bytes(inst.item[2].item[1].item[0].item[5].linkId),
force_bytes("2.1.1.6"),
)
self.assertEqual(
force_bytes(inst.item[2].item[1].item[0].item[5].text),
force_bytes("Was this person adopted?"),
)
self.assertEqual(
force_bytes(inst.item[2].item[1].item[0].linkId), force_bytes("2.1.1")
)
self.assertEqual(
force_bytes(
inst.item[2].item[1].item[1].item[0].answer[0].valueCoding.code
),
force_bytes("LA10550-4"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[1].item[1].item[0].answer[0].valueCoding.display
),
force_bytes("-- Other Cancer"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[1].item[1].item[0].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[1].item[1].item[0].linkId),
force_bytes("2.1.2.1"),
)
self.assertEqual(
force_bytes(inst.item[2].item[1].item[1].item[0].text),
force_bytes("Disease or Condition"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[1].item[1].item[1].answer[0].valueCoding.code
),
force_bytes("LA10397-0"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[1].item[1].item[1].answer[0].valueCoding.display
),
force_bytes("30-39"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[1].item[1].item[1].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[1].item[1].item[1].linkId),
force_bytes("2.1.2.2"),
)
self.assertEqual(
force_bytes(inst.item[2].item[1].item[1].item[1].text),
force_bytes("Age at Diagnosis"),
)
self.assertEqual(
force_bytes(inst.item[2].item[1].item[1].linkId), force_bytes("2.1.2")
)
self.assertEqual(
force_bytes(inst.item[2].item[1].item[1].text),
force_bytes("This family member's history of disease"),
)
self.assertEqual(force_bytes(inst.item[2].item[1].linkId), force_bytes("2.1"))
self.assertEqual(
force_bytes(
inst.item[2].item[2].item[0].item[0].answer[0].valueCoding.code
),
force_bytes("LA10418-4"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[2].item[0].item[0].answer[0].valueCoding.display
),
force_bytes("Sister"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[2].item[0].item[0].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[2].item[0].item[0].definition),
force_bytes("http://loinc.org/fhir/DataElement/54136-7"),
)
self.assertEqual(
force_bytes(inst.item[2].item[2].item[0].item[0].linkId),
force_bytes("2.1.1.1"),
)
self.assertEqual(
force_bytes(inst.item[2].item[2].item[0].item[0].text),
force_bytes("Relationship to you"),
)
self.assertEqual(
force_bytes(inst.item[2].item[2].item[0].item[1].answer[0].valueString),
force_bytes("Janet"),
)
self.assertEqual(
force_bytes(inst.item[2].item[2].item[0].item[1].definition),
force_bytes("http://loinc.org/fhir/DataElement/54138-3"),
)
self.assertEqual(
force_bytes(inst.item[2].item[2].item[0].item[1].linkId),
force_bytes("2.1.1.2"),
)
self.assertEqual(
force_bytes(inst.item[2].item[2].item[0].item[1].text), force_bytes("Name")
)
self.assertEqual(
force_bytes(
inst.item[2].item[2].item[0].item[2].answer[0].valueCoding.code
),
force_bytes("LA3-6"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[2].item[0].item[2].answer[0].valueCoding.display
),
force_bytes("Female"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[2].item[0].item[2].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[2].item[0].item[2].definition),
force_bytes("http://loinc.org/fhir/DataElement/54123-5"),
)
self.assertEqual(
force_bytes(inst.item[2].item[2].item[0].item[2].linkId),
force_bytes("2.1.1.3"),
)
self.assertEqual(
force_bytes(inst.item[2].item[2].item[0].item[2].text),
force_bytes("Gender"),
)
self.assertEqual(
inst.item[2]
.item[2]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueDecimal,
36,
)
self.assertEqual(
force_bytes(
inst.item[2]
.item[2]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.definition
),
force_bytes("http://loinc.org/fhir/DataElement/54141-7"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[2].item[0].item[3].answer[0].item[0].item[0].linkId
),
force_bytes("2.1.1.4.2.2"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[2].item[0].item[3].answer[0].item[0].item[0].text
),
force_bytes("Age"),
)
self.assertEqual(
force_bytes(inst.item[2].item[2].item[0].item[3].answer[0].item[0].linkId),
force_bytes("2.1.1.4.2"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[2].item[0].item[3].answer[0].valueCoding.code
),
force_bytes("LA33-6"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[2].item[0].item[3].answer[0].valueCoding.display
),
force_bytes("Yes"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[2].item[0].item[3].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[2].item[0].item[3].definition),
force_bytes("http://loinc.org/fhir/DataElement/54139-1"),
)
self.assertEqual(
force_bytes(inst.item[2].item[2].item[0].item[3].linkId),
force_bytes("2.1.1.4"),
)
self.assertEqual(
force_bytes(inst.item[2].item[2].item[0].item[3].text),
force_bytes("Living?"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[2].item[0].item[4].answer[0].valueCoding.code
),
force_bytes("LA32-8"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[2].item[0].item[4].answer[0].valueCoding.display
),
force_bytes("No"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[2].item[0].item[4].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[2].item[0].item[4].definition),
force_bytes("http://loinc.org/fhir/DataElement/54121-9"),
)
self.assertEqual(
force_bytes(inst.item[2].item[2].item[0].item[4].linkId),
force_bytes("2.1.1.5"),
)
self.assertEqual(
force_bytes(inst.item[2].item[2].item[0].item[4].text),
force_bytes("Was this person born a twin?"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[2].item[0].item[5].answer[0].valueCoding.code
),
force_bytes("LA32-8"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[2].item[0].item[5].answer[0].valueCoding.display
),
force_bytes("No"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[2].item[0].item[5].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[2].item[0].item[5].definition),
force_bytes("http://loinc.org/fhir/DataElement/54122-7"),
)
self.assertEqual(
force_bytes(inst.item[2].item[2].item[0].item[5].linkId),
force_bytes("2.1.1.6"),
)
self.assertEqual(
force_bytes(inst.item[2].item[2].item[0].item[5].text),
force_bytes("Was this person adopted?"),
)
self.assertEqual(
force_bytes(inst.item[2].item[2].item[0].linkId), force_bytes("2.1.1")
)
self.assertEqual(
force_bytes(
inst.item[2].item[2].item[1].item[0].answer[0].valueCoding.code
),
force_bytes("LA10536-3"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[2].item[1].item[0].answer[0].valueCoding.display
),
force_bytes("-- Breast Cancer"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[2].item[1].item[0].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[2].item[1].item[0].linkId),
force_bytes("2.1.2.1"),
)
self.assertEqual(
force_bytes(inst.item[2].item[2].item[1].item[0].text),
force_bytes("Disease or Condition"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[2].item[1].item[1].answer[0].valueCoding.code
),
force_bytes("LA10397-0"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[2].item[1].item[1].answer[0].valueCoding.display
),
force_bytes("30-39"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[2].item[1].item[1].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[2].item[1].item[1].linkId),
force_bytes("2.1.2.2"),
)
self.assertEqual(
force_bytes(inst.item[2].item[2].item[1].item[1].text),
force_bytes("Age at Diagnosis"),
)
self.assertEqual(
force_bytes(inst.item[2].item[2].item[1].linkId), force_bytes("2.1.2")
)
self.assertEqual(
force_bytes(inst.item[2].item[2].item[1].text),
force_bytes("This family member's history of disease"),
)
self.assertEqual(force_bytes(inst.item[2].item[2].linkId), force_bytes("2.1"))
self.assertEqual(
force_bytes(
inst.item[2].item[3].item[0].item[0].answer[0].valueCoding.code
),
force_bytes("LA10419-2"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[3].item[0].item[0].answer[0].valueCoding.display
),
force_bytes("Nephew"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[3].item[0].item[0].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[3].item[0].item[0].definition),
force_bytes("http://loinc.org/fhir/DataElement/54136-7"),
)
self.assertEqual(
force_bytes(inst.item[2].item[3].item[0].item[0].linkId),
force_bytes("2.1.1.1"),
)
self.assertEqual(
force_bytes(inst.item[2].item[3].item[0].item[0].text),
force_bytes("Relationship to you"),
)
self.assertEqual(
force_bytes(inst.item[2].item[3].item[0].item[1].answer[0].valueString),
force_bytes("Ian"),
)
self.assertEqual(
force_bytes(inst.item[2].item[3].item[0].item[1].definition),
force_bytes("http://loinc.org/fhir/DataElement/54138-3"),
)
self.assertEqual(
force_bytes(inst.item[2].item[3].item[0].item[1].linkId),
force_bytes("2.1.1.2"),
)
self.assertEqual(
force_bytes(inst.item[2].item[3].item[0].item[1].text), force_bytes("Name")
)
self.assertEqual(
force_bytes(
inst.item[2].item[3].item[0].item[2].answer[0].valueCoding.code
),
force_bytes("LA2-8"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[3].item[0].item[2].answer[0].valueCoding.display
),
force_bytes("Male"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[3].item[0].item[2].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[3].item[0].item[2].definition),
force_bytes("http://loinc.org/fhir/DataElement/54123-5"),
)
self.assertEqual(
force_bytes(inst.item[2].item[3].item[0].item[2].linkId),
force_bytes("2.1.1.3"),
)
self.assertEqual(
force_bytes(inst.item[2].item[3].item[0].item[2].text),
force_bytes("Gender"),
)
self.assertEqual(
inst.item[2]
.item[3]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueDecimal,
16,
)
self.assertEqual(
force_bytes(
inst.item[2]
.item[3]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.definition
),
force_bytes("http://loinc.org/fhir/DataElement/54141-7"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[3].item[0].item[3].answer[0].item[0].item[0].linkId
),
force_bytes("2.1.1.4.2.2"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[3].item[0].item[3].answer[0].item[0].item[0].text
),
force_bytes("Age"),
)
self.assertEqual(
force_bytes(inst.item[2].item[3].item[0].item[3].answer[0].item[0].linkId),
force_bytes("2.1.1.4.2"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[3].item[0].item[3].answer[0].valueCoding.code
),
force_bytes("LA33-6"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[3].item[0].item[3].answer[0].valueCoding.display
),
force_bytes("Yes"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[3].item[0].item[3].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[3].item[0].item[3].definition),
force_bytes("http://loinc.org/fhir/DataElement/54139-1"),
)
self.assertEqual(
force_bytes(inst.item[2].item[3].item[0].item[3].linkId),
force_bytes("2.1.1.4"),
)
self.assertEqual(
force_bytes(inst.item[2].item[3].item[0].item[3].text),
force_bytes("Living?"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[3].item[0].item[4].answer[0].valueCoding.code
),
force_bytes("LA32-8"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[3].item[0].item[4].answer[0].valueCoding.display
),
force_bytes("No"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[3].item[0].item[4].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[3].item[0].item[4].definition),
force_bytes("http://loinc.org/fhir/DataElement/54121-9"),
)
self.assertEqual(
force_bytes(inst.item[2].item[3].item[0].item[4].linkId),
force_bytes("2.1.1.5"),
)
self.assertEqual(
force_bytes(inst.item[2].item[3].item[0].item[4].text),
force_bytes("Was this person born a twin?"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[3].item[0].item[5].answer[0].valueCoding.code
),
force_bytes("LA32-8"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[3].item[0].item[5].answer[0].valueCoding.display
),
force_bytes("No"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[3].item[0].item[5].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[3].item[0].item[5].definition),
force_bytes("http://loinc.org/fhir/DataElement/54122-7"),
)
self.assertEqual(
force_bytes(inst.item[2].item[3].item[0].item[5].linkId),
force_bytes("2.1.1.6"),
)
self.assertEqual(
force_bytes(inst.item[2].item[3].item[0].item[5].text),
force_bytes("Was this person adopted?"),
)
self.assertEqual(
force_bytes(inst.item[2].item[3].item[0].linkId), force_bytes("2.1.1")
)
self.assertEqual(force_bytes(inst.item[2].item[3].linkId), force_bytes("2.1"))
self.assertEqual(
force_bytes(
inst.item[2].item[4].item[0].item[0].answer[0].valueCoding.code
),
force_bytes("LA10420-0"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[4].item[0].item[0].answer[0].valueCoding.display
),
force_bytes("Niece"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[4].item[0].item[0].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[4].item[0].item[0].definition),
force_bytes("http://loinc.org/fhir/DataElement/54136-7"),
)
self.assertEqual(
force_bytes(inst.item[2].item[4].item[0].item[0].linkId),
force_bytes("2.1.1.1"),
)
self.assertEqual(
force_bytes(inst.item[2].item[4].item[0].item[0].text),
force_bytes("Relationship to you"),
)
self.assertEqual(
force_bytes(inst.item[2].item[4].item[0].item[1].answer[0].valueString),
force_bytes("Helen"),
)
self.assertEqual(
force_bytes(inst.item[2].item[4].item[0].item[1].definition),
force_bytes("http://loinc.org/fhir/DataElement/54138-3"),
)
self.assertEqual(
force_bytes(inst.item[2].item[4].item[0].item[1].linkId),
force_bytes("2.1.1.2"),
)
self.assertEqual(
force_bytes(inst.item[2].item[4].item[0].item[1].text), force_bytes("Name")
)
self.assertEqual(
force_bytes(
inst.item[2].item[4].item[0].item[2].answer[0].valueCoding.code
),
force_bytes("LA3-6"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[4].item[0].item[2].answer[0].valueCoding.display
),
force_bytes("Female"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[4].item[0].item[2].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[4].item[0].item[2].definition),
force_bytes("http://loinc.org/fhir/DataElement/54123-5"),
)
self.assertEqual(
force_bytes(inst.item[2].item[4].item[0].item[2].linkId),
force_bytes("2.1.1.3"),
)
self.assertEqual(
force_bytes(inst.item[2].item[4].item[0].item[2].text),
force_bytes("Gender"),
)
self.assertEqual(
inst.item[2]
.item[4]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueDecimal,
15,
)
self.assertEqual(
force_bytes(
inst.item[2]
.item[4]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.definition
),
force_bytes("http://loinc.org/fhir/DataElement/54141-7"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[4].item[0].item[3].answer[0].item[0].item[0].linkId
),
force_bytes("2.1.1.4.2.2"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[4].item[0].item[3].answer[0].item[0].item[0].text
),
force_bytes("Age"),
)
self.assertEqual(
force_bytes(inst.item[2].item[4].item[0].item[3].answer[0].item[0].linkId),
force_bytes("2.1.1.4.2"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[4].item[0].item[3].answer[0].valueCoding.code
),
force_bytes("LA33-6"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[4].item[0].item[3].answer[0].valueCoding.display
),
force_bytes("Yes"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[4].item[0].item[3].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[4].item[0].item[3].definition),
force_bytes("http://loinc.org/fhir/DataElement/54139-1"),
)
self.assertEqual(
force_bytes(inst.item[2].item[4].item[0].item[3].linkId),
force_bytes("2.1.1.4"),
)
self.assertEqual(
force_bytes(inst.item[2].item[4].item[0].item[3].text),
force_bytes("Living?"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[4].item[0].item[4].answer[0].valueCoding.code
),
force_bytes("LA32-8"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[4].item[0].item[4].answer[0].valueCoding.display
),
force_bytes("No"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[4].item[0].item[4].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[4].item[0].item[4].definition),
force_bytes("http://loinc.org/fhir/DataElement/54121-9"),
)
self.assertEqual(
force_bytes(inst.item[2].item[4].item[0].item[4].linkId),
force_bytes("2.1.1.5"),
)
self.assertEqual(
force_bytes(inst.item[2].item[4].item[0].item[4].text),
force_bytes("Was this person born a twin?"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[4].item[0].item[5].answer[0].valueCoding.code
),
force_bytes("LA32-8"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[4].item[0].item[5].answer[0].valueCoding.display
),
force_bytes("No"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[4].item[0].item[5].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[4].item[0].item[5].definition),
force_bytes("http://loinc.org/fhir/DataElement/54122-7"),
)
self.assertEqual(
force_bytes(inst.item[2].item[4].item[0].item[5].linkId),
force_bytes("2.1.1.6"),
)
self.assertEqual(
force_bytes(inst.item[2].item[4].item[0].item[5].text),
force_bytes("Was this person adopted?"),
)
self.assertEqual(
force_bytes(inst.item[2].item[4].item[0].linkId), force_bytes("2.1.1")
)
self.assertEqual(force_bytes(inst.item[2].item[4].linkId), force_bytes("2.1"))
self.assertEqual(
force_bytes(
inst.item[2].item[5].item[0].item[0].answer[0].valueCoding.code
),
force_bytes("LA10416-8"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[5].item[0].item[0].answer[0].valueCoding.display
),
force_bytes("Father"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[5].item[0].item[0].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[5].item[0].item[0].definition),
force_bytes("http://loinc.org/fhir/DataElement/54136-7"),
)
self.assertEqual(
force_bytes(inst.item[2].item[5].item[0].item[0].linkId),
force_bytes("2.1.1.1"),
)
self.assertEqual(
force_bytes(inst.item[2].item[5].item[0].item[0].text),
force_bytes("Relationship to you"),
)
self.assertEqual(
force_bytes(inst.item[2].item[5].item[0].item[1].answer[0].valueString),
force_bytes("Donald"),
)
self.assertEqual(
force_bytes(inst.item[2].item[5].item[0].item[1].definition),
force_bytes("http://loinc.org/fhir/DataElement/54138-3"),
)
self.assertEqual(
force_bytes(inst.item[2].item[5].item[0].item[1].linkId),
force_bytes("2.1.1.2"),
)
self.assertEqual(
force_bytes(inst.item[2].item[5].item[0].item[1].text), force_bytes("Name")
)
self.assertEqual(
force_bytes(
inst.item[2].item[5].item[0].item[2].answer[0].valueCoding.code
),
force_bytes("LA2-8"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[5].item[0].item[2].answer[0].valueCoding.display
),
force_bytes("Male"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[5].item[0].item[2].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[5].item[0].item[2].definition),
force_bytes("http://loinc.org/fhir/DataElement/54123-5"),
)
self.assertEqual(
force_bytes(inst.item[2].item[5].item[0].item[2].linkId),
force_bytes("2.1.1.3"),
)
self.assertEqual(
force_bytes(inst.item[2].item[5].item[0].item[2].text),
force_bytes("Gender"),
)
self.assertEqual(
inst.item[2]
.item[5]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueDecimal,
52,
)
self.assertEqual(
force_bytes(
inst.item[2]
.item[5]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.definition
),
force_bytes("http://loinc.org/fhir/DataElement/54141-7"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[5].item[0].item[3].answer[0].item[0].item[0].linkId
),
force_bytes("2.1.1.4.2.2"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[5].item[0].item[3].answer[0].item[0].item[0].text
),
force_bytes("Age"),
)
self.assertEqual(
force_bytes(inst.item[2].item[5].item[0].item[3].answer[0].item[0].linkId),
force_bytes("2.1.1.4.2"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[5].item[0].item[3].answer[0].valueCoding.code
),
force_bytes("LA33-6"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[5].item[0].item[3].answer[0].valueCoding.display
),
force_bytes("Yes"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[5].item[0].item[3].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[5].item[0].item[3].definition),
force_bytes("http://loinc.org/fhir/DataElement/54139-1"),
)
self.assertEqual(
force_bytes(inst.item[2].item[5].item[0].item[3].linkId),
force_bytes("2.1.1.4"),
)
self.assertEqual(
force_bytes(inst.item[2].item[5].item[0].item[3].text),
force_bytes("Living?"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[5].item[0].item[4].answer[0].valueCoding.code
),
force_bytes("LA32-8"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[5].item[0].item[4].answer[0].valueCoding.display
),
force_bytes("No"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[5].item[0].item[4].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[5].item[0].item[4].definition),
force_bytes("http://loinc.org/fhir/DataElement/54121-9"),
)
self.assertEqual(
force_bytes(inst.item[2].item[5].item[0].item[4].linkId),
force_bytes("2.1.1.5"),
)
self.assertEqual(
force_bytes(inst.item[2].item[5].item[0].item[4].text),
force_bytes("Was this person born a twin?"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[5].item[0].item[5].answer[0].valueCoding.code
),
force_bytes("LA32-8"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[5].item[0].item[5].answer[0].valueCoding.display
),
force_bytes("No"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[5].item[0].item[5].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[5].item[0].item[5].definition),
force_bytes("http://loinc.org/fhir/DataElement/54122-7"),
)
self.assertEqual(
force_bytes(inst.item[2].item[5].item[0].item[5].linkId),
force_bytes("2.1.1.6"),
)
self.assertEqual(
force_bytes(inst.item[2].item[5].item[0].item[5].text),
force_bytes("Was this person adopted?"),
)
self.assertEqual(
force_bytes(inst.item[2].item[5].item[0].linkId), force_bytes("2.1.1")
)
self.assertEqual(force_bytes(inst.item[2].item[5].linkId), force_bytes("2.1"))
self.assertEqual(
force_bytes(
inst.item[2].item[6].item[0].item[0].answer[0].valueCoding.code
),
force_bytes("LA10425-9"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[6].item[0].item[0].answer[0].valueCoding.display
),
force_bytes("Paternal Uncle"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[6].item[0].item[0].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[6].item[0].item[0].definition),
force_bytes("http://loinc.org/fhir/DataElement/54136-7"),
)
self.assertEqual(
force_bytes(inst.item[2].item[6].item[0].item[0].linkId),
force_bytes("2.1.1.1"),
)
self.assertEqual(
force_bytes(inst.item[2].item[6].item[0].item[0].text),
force_bytes("Relationship to you"),
)
self.assertEqual(
force_bytes(inst.item[2].item[6].item[0].item[1].answer[0].valueString),
force_bytes("Eric"),
)
self.assertEqual(
force_bytes(inst.item[2].item[6].item[0].item[1].definition),
force_bytes("http://loinc.org/fhir/DataElement/54138-3"),
)
self.assertEqual(
force_bytes(inst.item[2].item[6].item[0].item[1].linkId),
force_bytes("2.1.1.2"),
)
self.assertEqual(
force_bytes(inst.item[2].item[6].item[0].item[1].text), force_bytes("Name")
)
self.assertEqual(
force_bytes(
inst.item[2].item[6].item[0].item[2].answer[0].valueCoding.code
),
force_bytes("LA2-8"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[6].item[0].item[2].answer[0].valueCoding.display
),
force_bytes("Male"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[6].item[0].item[2].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[6].item[0].item[2].definition),
force_bytes("http://loinc.org/fhir/DataElement/54123-5"),
)
self.assertEqual(
force_bytes(inst.item[2].item[6].item[0].item[2].linkId),
force_bytes("2.1.1.3"),
)
self.assertEqual(
force_bytes(inst.item[2].item[6].item[0].item[2].text),
force_bytes("Gender"),
)
self.assertEqual(
inst.item[2]
.item[6]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueDecimal,
56,
)
self.assertEqual(
force_bytes(
inst.item[2]
.item[6]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.definition
),
force_bytes("http://loinc.org/fhir/DataElement/54141-7"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[6].item[0].item[3].answer[0].item[0].item[0].linkId
),
force_bytes("2.1.1.4.2.2"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[6].item[0].item[3].answer[0].item[0].item[0].text
),
force_bytes("Age"),
)
self.assertEqual(
force_bytes(inst.item[2].item[6].item[0].item[3].answer[0].item[0].linkId),
force_bytes("2.1.1.4.2"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[6].item[0].item[3].answer[0].valueCoding.code
),
force_bytes("LA33-6"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[6].item[0].item[3].answer[0].valueCoding.display
),
force_bytes("Yes"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[6].item[0].item[3].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[6].item[0].item[3].definition),
force_bytes("http://loinc.org/fhir/DataElement/54139-1"),
)
self.assertEqual(
force_bytes(inst.item[2].item[6].item[0].item[3].linkId),
force_bytes("2.1.1.4"),
)
self.assertEqual(
force_bytes(inst.item[2].item[6].item[0].item[3].text),
force_bytes("Living?"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[6].item[0].item[4].answer[0].valueCoding.code
),
force_bytes("LA32-8"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[6].item[0].item[4].answer[0].valueCoding.display
),
force_bytes("No"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[6].item[0].item[4].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[6].item[0].item[4].definition),
force_bytes("http://loinc.org/fhir/DataElement/54121-9"),
)
self.assertEqual(
force_bytes(inst.item[2].item[6].item[0].item[4].linkId),
force_bytes("2.1.1.5"),
)
self.assertEqual(
force_bytes(inst.item[2].item[6].item[0].item[4].text),
force_bytes("Was this person born a twin?"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[6].item[0].item[5].answer[0].valueCoding.code
),
force_bytes("LA32-8"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[6].item[0].item[5].answer[0].valueCoding.display
),
force_bytes("No"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[6].item[0].item[5].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[6].item[0].item[5].definition),
force_bytes("http://loinc.org/fhir/DataElement/54122-7"),
)
self.assertEqual(
force_bytes(inst.item[2].item[6].item[0].item[5].linkId),
force_bytes("2.1.1.6"),
)
self.assertEqual(
force_bytes(inst.item[2].item[6].item[0].item[5].text),
force_bytes("Was this person adopted?"),
)
self.assertEqual(
force_bytes(inst.item[2].item[6].item[0].linkId), force_bytes("2.1.1")
)
self.assertEqual(force_bytes(inst.item[2].item[6].linkId), force_bytes("2.1"))
self.assertEqual(
force_bytes(
inst.item[2].item[7].item[0].item[0].answer[0].valueCoding.code
),
force_bytes("LA10421-8"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[7].item[0].item[0].answer[0].valueCoding.display
),
force_bytes("Paternal Aunt"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[7].item[0].item[0].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[7].item[0].item[0].definition),
force_bytes("http://loinc.org/fhir/DataElement/54136-7"),
)
self.assertEqual(
force_bytes(inst.item[2].item[7].item[0].item[0].linkId),
force_bytes("2.1.1.1"),
)
self.assertEqual(
force_bytes(inst.item[2].item[7].item[0].item[0].text),
force_bytes("Relationship to you"),
)
self.assertEqual(
force_bytes(inst.item[2].item[7].item[0].item[1].answer[0].valueString),
force_bytes("Fiona"),
)
self.assertEqual(
force_bytes(inst.item[2].item[7].item[0].item[1].definition),
force_bytes("http://loinc.org/fhir/DataElement/54138-3"),
)
self.assertEqual(
force_bytes(inst.item[2].item[7].item[0].item[1].linkId),
force_bytes("2.1.1.2"),
)
self.assertEqual(
force_bytes(inst.item[2].item[7].item[0].item[1].text), force_bytes("Name")
)
self.assertEqual(
force_bytes(
inst.item[2].item[7].item[0].item[2].answer[0].valueCoding.code
),
force_bytes("LA3-6"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[7].item[0].item[2].answer[0].valueCoding.display
),
force_bytes("Female"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[7].item[0].item[2].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[7].item[0].item[2].definition),
force_bytes("http://loinc.org/fhir/DataElement/54123-5"),
)
self.assertEqual(
force_bytes(inst.item[2].item[7].item[0].item[2].linkId),
force_bytes("2.1.1.3"),
)
self.assertEqual(
force_bytes(inst.item[2].item[7].item[0].item[2].text),
force_bytes("Gender"),
)
self.assertEqual(
inst.item[2]
.item[7]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueDecimal,
57,
)
self.assertEqual(
force_bytes(
inst.item[2]
.item[7]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.definition
),
force_bytes("http://loinc.org/fhir/DataElement/54141-7"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[7].item[0].item[3].answer[0].item[0].item[0].linkId
),
force_bytes("2.1.1.4.2.2"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[7].item[0].item[3].answer[0].item[0].item[0].text
),
force_bytes("Age"),
)
self.assertEqual(
force_bytes(inst.item[2].item[7].item[0].item[3].answer[0].item[0].linkId),
force_bytes("2.1.1.4.2"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[7].item[0].item[3].answer[0].valueCoding.code
),
force_bytes("LA33-6"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[7].item[0].item[3].answer[0].valueCoding.display
),
force_bytes("Yes"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[7].item[0].item[3].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[7].item[0].item[3].definition),
force_bytes("http://loinc.org/fhir/DataElement/54139-1"),
)
self.assertEqual(
force_bytes(inst.item[2].item[7].item[0].item[3].linkId),
force_bytes("2.1.1.4"),
)
self.assertEqual(
force_bytes(inst.item[2].item[7].item[0].item[3].text),
force_bytes("Living?"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[7].item[0].item[4].answer[0].valueCoding.code
),
force_bytes("LA32-8"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[7].item[0].item[4].answer[0].valueCoding.display
),
force_bytes("No"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[7].item[0].item[4].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[7].item[0].item[4].definition),
force_bytes("http://loinc.org/fhir/DataElement/54121-9"),
)
self.assertEqual(
force_bytes(inst.item[2].item[7].item[0].item[4].linkId),
force_bytes("2.1.1.5"),
)
self.assertEqual(
force_bytes(inst.item[2].item[7].item[0].item[4].text),
force_bytes("Was this person born a twin?"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[7].item[0].item[5].answer[0].valueCoding.code
),
force_bytes("LA32-8"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[7].item[0].item[5].answer[0].valueCoding.display
),
force_bytes("No"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[7].item[0].item[5].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[7].item[0].item[5].definition),
force_bytes("http://loinc.org/fhir/DataElement/54122-7"),
)
self.assertEqual(
force_bytes(inst.item[2].item[7].item[0].item[5].linkId),
force_bytes("2.1.1.6"),
)
self.assertEqual(
force_bytes(inst.item[2].item[7].item[0].item[5].text),
force_bytes("Was this person adopted?"),
)
self.assertEqual(
force_bytes(inst.item[2].item[7].item[0].linkId), force_bytes("2.1.1")
)
self.assertEqual(
force_bytes(
inst.item[2].item[7].item[1].item[0].answer[0].valueCoding.code
),
force_bytes("LA10543-9"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[7].item[1].item[0].answer[0].valueCoding.display
),
force_bytes("-- Skin Cancer"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[7].item[1].item[0].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[7].item[1].item[0].linkId),
force_bytes("2.1.2.1"),
)
self.assertEqual(
force_bytes(inst.item[2].item[7].item[1].item[0].text),
force_bytes("Disease or Condition"),
)
self.assertEqual(
force_bytes(inst.item[2].item[7].item[1].linkId), force_bytes("2.1.2")
)
self.assertEqual(
force_bytes(inst.item[2].item[7].item[1].text),
force_bytes("This family member's history of disease"),
)
self.assertEqual(force_bytes(inst.item[2].item[7].linkId), force_bytes("2.1"))
self.assertEqual(
force_bytes(
inst.item[2].item[8].item[0].item[0].answer[0].valueCoding.code
),
force_bytes("LA10423-4"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[8].item[0].item[0].answer[0].valueCoding.display
),
force_bytes("Paternal Grandfather"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[8].item[0].item[0].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[8].item[0].item[0].definition),
force_bytes("http://loinc.org/fhir/DataElement/54136-7"),
)
self.assertEqual(
force_bytes(inst.item[2].item[8].item[0].item[0].linkId),
force_bytes("2.1.1.1"),
)
self.assertEqual(
force_bytes(inst.item[2].item[8].item[0].item[0].text),
force_bytes("Relationship to you"),
)
self.assertEqual(
force_bytes(inst.item[2].item[8].item[0].item[1].answer[0].valueString),
force_bytes("Bob"),
)
self.assertEqual(
force_bytes(inst.item[2].item[8].item[0].item[1].definition),
force_bytes("http://loinc.org/fhir/DataElement/54138-3"),
)
self.assertEqual(
force_bytes(inst.item[2].item[8].item[0].item[1].linkId),
force_bytes("2.1.1.2"),
)
self.assertEqual(
force_bytes(inst.item[2].item[8].item[0].item[1].text), force_bytes("Name")
)
self.assertEqual(
force_bytes(
inst.item[2].item[8].item[0].item[2].answer[0].valueCoding.code
),
force_bytes("LA2-8"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[8].item[0].item[2].answer[0].valueCoding.display
),
force_bytes("Male"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[8].item[0].item[2].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[8].item[0].item[2].definition),
force_bytes("http://loinc.org/fhir/DataElement/54123-5"),
)
self.assertEqual(
force_bytes(inst.item[2].item[8].item[0].item[2].linkId),
force_bytes("2.1.1.3"),
)
self.assertEqual(
force_bytes(inst.item[2].item[8].item[0].item[2].text),
force_bytes("Gender"),
)
self.assertEqual(
force_bytes(
inst.item[2]
.item[8]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.code
),
force_bytes("LA10537-1"),
)
self.assertEqual(
force_bytes(
inst.item[2]
.item[8]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.display
),
force_bytes("-- Colon Cancer"),
)
self.assertEqual(
force_bytes(
inst.item[2]
.item[8]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(
inst.item[2]
.item[8]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.definition
),
force_bytes("http://loinc.org/fhir/DataElement/54112-8"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[8].item[0].item[3].answer[0].item[0].item[0].linkId
),
force_bytes("2.1.1.4.1.1"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[8].item[0].item[3].answer[0].item[0].item[0].text
),
force_bytes("Cause of Death"),
)
self.assertEqual(
force_bytes(
inst.item[2]
.item[8]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[1]
.answer[0]
.valueCoding.code
),
force_bytes("LA10400-2"),
)
self.assertEqual(
force_bytes(
inst.item[2]
.item[8]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[1]
.answer[0]
.valueCoding.display
),
force_bytes("OVER 60"),
)
self.assertEqual(
force_bytes(
inst.item[2]
.item[8]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[1]
.answer[0]
.valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(
inst.item[2]
.item[8]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[1]
.definition
),
force_bytes("http://loinc.org/fhir/DataElement/54113-6"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[8].item[0].item[3].answer[0].item[0].item[1].linkId
),
force_bytes("2.1.1.4.1.2"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[8].item[0].item[3].answer[0].item[0].item[1].text
),
force_bytes("Age at Death"),
)
self.assertEqual(
force_bytes(inst.item[2].item[8].item[0].item[3].answer[0].item[0].linkId),
force_bytes("2.1.1.4.1"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[8].item[0].item[3].answer[0].valueCoding.code
),
force_bytes("LA32-8"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[8].item[0].item[3].answer[0].valueCoding.display
),
force_bytes("No"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[8].item[0].item[3].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[8].item[0].item[3].definition),
force_bytes("http://loinc.org/fhir/DataElement/54139-1"),
)
self.assertEqual(
force_bytes(inst.item[2].item[8].item[0].item[3].linkId),
force_bytes("2.1.1.4"),
)
self.assertEqual(
force_bytes(inst.item[2].item[8].item[0].item[3].text),
force_bytes("Living?"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[8].item[0].item[4].answer[0].valueCoding.code
),
force_bytes("LA32-8"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[8].item[0].item[4].answer[0].valueCoding.display
),
force_bytes("No"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[8].item[0].item[4].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[8].item[0].item[4].definition),
force_bytes("http://loinc.org/fhir/DataElement/54121-9"),
)
self.assertEqual(
force_bytes(inst.item[2].item[8].item[0].item[4].linkId),
force_bytes("2.1.1.5"),
)
self.assertEqual(
force_bytes(inst.item[2].item[8].item[0].item[4].text),
force_bytes("Was this person born a twin?"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[8].item[0].item[5].answer[0].valueCoding.code
),
force_bytes("LA32-8"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[8].item[0].item[5].answer[0].valueCoding.display
),
force_bytes("No"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[8].item[0].item[5].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[8].item[0].item[5].definition),
force_bytes("http://loinc.org/fhir/DataElement/54122-7"),
)
self.assertEqual(
force_bytes(inst.item[2].item[8].item[0].item[5].linkId),
force_bytes("2.1.1.6"),
)
self.assertEqual(
force_bytes(inst.item[2].item[8].item[0].item[5].text),
force_bytes("Was this person adopted?"),
)
self.assertEqual(
force_bytes(inst.item[2].item[8].item[0].linkId), force_bytes("2.1.1")
)
self.assertEqual(
force_bytes(
inst.item[2].item[8].item[1].item[0].answer[0].valueCoding.code
),
force_bytes("LA10537-1"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[8].item[1].item[0].answer[0].valueCoding.display
),
force_bytes("-- Colon Cancer"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[8].item[1].item[0].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[8].item[1].item[0].linkId),
force_bytes("2.1.2.1"),
)
self.assertEqual(
force_bytes(inst.item[2].item[8].item[1].item[0].text),
force_bytes("Disease or Condition"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[8].item[1].item[1].answer[0].valueCoding.code
),
force_bytes("LA10400-2"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[8].item[1].item[1].answer[0].valueCoding.display
),
force_bytes("OVER 60"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[8].item[1].item[1].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[8].item[1].item[1].linkId),
force_bytes("2.1.2.2"),
)
self.assertEqual(
force_bytes(inst.item[2].item[8].item[1].item[1].text),
force_bytes("Age at Diagnosis"),
)
self.assertEqual(
force_bytes(inst.item[2].item[8].item[1].linkId), force_bytes("2.1.2")
)
self.assertEqual(
force_bytes(inst.item[2].item[8].item[1].text),
force_bytes("This family member's history of disease"),
)
self.assertEqual(force_bytes(inst.item[2].item[8].linkId), force_bytes("2.1"))
self.assertEqual(
force_bytes(
inst.item[2].item[9].item[0].item[0].answer[0].valueCoding.code
),
force_bytes("LA10424-2"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[9].item[0].item[0].answer[0].valueCoding.display
),
force_bytes("Paternal Grandmother"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[9].item[0].item[0].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[9].item[0].item[0].definition),
force_bytes("http://loinc.org/fhir/DataElement/54136-7"),
)
self.assertEqual(
force_bytes(inst.item[2].item[9].item[0].item[0].linkId),
force_bytes("2.1.1.1"),
)
self.assertEqual(
force_bytes(inst.item[2].item[9].item[0].item[0].text),
force_bytes("Relationship to you"),
)
self.assertEqual(
force_bytes(inst.item[2].item[9].item[0].item[1].answer[0].valueString),
force_bytes("Claire"),
)
self.assertEqual(
force_bytes(inst.item[2].item[9].item[0].item[1].definition),
force_bytes("http://loinc.org/fhir/DataElement/54138-3"),
)
self.assertEqual(
force_bytes(inst.item[2].item[9].item[0].item[1].linkId),
force_bytes("2.1.1.2"),
)
self.assertEqual(
force_bytes(inst.item[2].item[9].item[0].item[1].text), force_bytes("Name")
)
self.assertEqual(
force_bytes(
inst.item[2].item[9].item[0].item[2].answer[0].valueCoding.code
),
force_bytes("LA3-6"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[9].item[0].item[2].answer[0].valueCoding.display
),
force_bytes("Female"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[9].item[0].item[2].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[9].item[0].item[2].definition),
force_bytes("http://loinc.org/fhir/DataElement/54123-5"),
)
self.assertEqual(
force_bytes(inst.item[2].item[9].item[0].item[2].linkId),
force_bytes("2.1.1.3"),
)
self.assertEqual(
force_bytes(inst.item[2].item[9].item[0].item[2].text),
force_bytes("Gender"),
)
self.assertEqual(
force_bytes(
inst.item[2]
.item[9]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.item[0]
.answer[0]
.valueString
),
force_bytes("Lou Gehrigs"),
)
self.assertEqual(
force_bytes(
inst.item[2]
.item[9]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.item[0]
.linkId
),
force_bytes("2.1.1.4.1.1.1"),
)
self.assertEqual(
force_bytes(
inst.item[2]
.item[9]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.item[0]
.text
),
force_bytes("Please specify"),
)
self.assertEqual(
force_bytes(
inst.item[2]
.item[9]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.code
),
force_bytes("LA10589-2"),
)
self.assertEqual(
force_bytes(
inst.item[2]
.item[9]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.display
),
force_bytes("-- Other/Unexpected"),
)
self.assertEqual(
force_bytes(
inst.item[2]
.item[9]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(
inst.item[2]
.item[9]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.definition
),
force_bytes("http://loinc.org/fhir/DataElement/54112-8"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[9].item[0].item[3].answer[0].item[0].item[0].linkId
),
force_bytes("2.1.1.4.1.1"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[9].item[0].item[3].answer[0].item[0].item[0].text
),
force_bytes("Cause of Death"),
)
self.assertEqual(
force_bytes(
inst.item[2]
.item[9]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[1]
.answer[0]
.valueCoding.code
),
force_bytes("LA10400-2"),
)
self.assertEqual(
force_bytes(
inst.item[2]
.item[9]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[1]
.answer[0]
.valueCoding.display
),
force_bytes("OVER 60"),
)
self.assertEqual(
force_bytes(
inst.item[2]
.item[9]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[1]
.answer[0]
.valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(
inst.item[2]
.item[9]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[1]
.definition
),
force_bytes("http://loinc.org/fhir/DataElement/54113-6"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[9].item[0].item[3].answer[0].item[0].item[1].linkId
),
force_bytes("2.1.1.4.1.2"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[9].item[0].item[3].answer[0].item[0].item[1].text
),
force_bytes("Age at Death"),
)
self.assertEqual(
force_bytes(inst.item[2].item[9].item[0].item[3].answer[0].item[0].linkId),
force_bytes("2.1.1.4.1"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[9].item[0].item[3].answer[0].valueCoding.code
),
force_bytes("LA32-8"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[9].item[0].item[3].answer[0].valueCoding.display
),
force_bytes("No"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[9].item[0].item[3].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[9].item[0].item[3].definition),
force_bytes("http://loinc.org/fhir/DataElement/54139-1"),
)
self.assertEqual(
force_bytes(inst.item[2].item[9].item[0].item[3].linkId),
force_bytes("2.1.1.4"),
)
self.assertEqual(
force_bytes(inst.item[2].item[9].item[0].item[3].text),
force_bytes("Living?"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[9].item[0].item[4].answer[0].valueCoding.code
),
force_bytes("LA32-8"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[9].item[0].item[4].answer[0].valueCoding.display
),
force_bytes("No"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[9].item[0].item[4].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[9].item[0].item[4].definition),
force_bytes("http://loinc.org/fhir/DataElement/54121-9"),
)
self.assertEqual(
force_bytes(inst.item[2].item[9].item[0].item[4].linkId),
force_bytes("2.1.1.5"),
)
self.assertEqual(
force_bytes(inst.item[2].item[9].item[0].item[4].text),
force_bytes("Was this person born a twin?"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[9].item[0].item[5].answer[0].valueCoding.code
),
force_bytes("LA32-8"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[9].item[0].item[5].answer[0].valueCoding.display
),
force_bytes("No"),
)
self.assertEqual(
force_bytes(
inst.item[2].item[9].item[0].item[5].answer[0].valueCoding.system
),
force_bytes("http://loinc.org"),
)
self.assertEqual(
force_bytes(inst.item[2].item[9].item[0].item[5].definition),
force_bytes("http://loinc.org/fhir/DataElement/54122-7"),
)
self.assertEqual(
force_bytes(inst.item[2].item[9].item[0].item[5].linkId),
force_bytes("2.1.1.6"),
)
self.assertEqual(
force_bytes(inst.item[2].item[9].item[0].item[5].text),
force_bytes("Was this person adopted?"),
)
self.assertEqual(
force_bytes(inst.item[2].item[9].item[0].linkId), force_bytes("2.1.1")
)
self.assertEqual(force_bytes(inst.item[2].item[9].linkId), force_bytes("2.1"))
self.assertEqual(force_bytes(inst.item[2].linkId), force_bytes("2"))
self.assertEqual(
force_bytes(inst.item[2].text),
force_bytes("Family member health information"),
)
self.assertEqual(force_bytes(inst.status), force_bytes("in-progress"))
self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
def testQuestionnaireResponse3(self):
inst = self.instantiate_from(
"questionnaireresponse-example-f201-lifelines.json"
)
self.assertIsNotNone(
inst, "Must have instantiated a QuestionnaireResponse instance"
)
self.implQuestionnaireResponse3(inst)
js = inst.as_json()
self.assertEqual("QuestionnaireResponse", js["resourceType"])
inst2 = questionnaireresponse.QuestionnaireResponse(js)
self.implQuestionnaireResponse3(inst2)
    def implQuestionnaireResponse3(self, inst):
        """Assert every expected field value of the f201-lifelines example.

        ``inst`` is a parsed QuestionnaireResponse; all string comparisons go
        through ``force_bytes`` so they behave the same on Python 2 and 3.
        """
        self.assertEqual(inst.authored.date, FHIRDate("2013-06-18T00:00:00+01:00").date)
        self.assertEqual(inst.authored.as_json(), "2013-06-18T00:00:00+01:00")
        self.assertEqual(force_bytes(inst.id), force_bytes("f201"))
        # Group 1: allergies
        self.assertEqual(
            force_bytes(inst.item[0].item[0].answer[0].valueString),
            force_bytes("I am allergic to house dust"),
        )
        self.assertEqual(force_bytes(inst.item[0].item[0].linkId), force_bytes("1.1"))
        self.assertEqual(
            force_bytes(inst.item[0].item[0].text),
            force_bytes("Do you have allergies?"),
        )
        self.assertEqual(force_bytes(inst.item[0].linkId), force_bytes("1"))
        # Group 2: general questions (gender, DOB, country, marital status)
        self.assertEqual(
            force_bytes(inst.item[1].item[0].answer[0].valueString), force_bytes("Male")
        )
        self.assertEqual(force_bytes(inst.item[1].item[0].linkId), force_bytes("2.1"))
        self.assertEqual(
            force_bytes(inst.item[1].item[0].text), force_bytes("What is your gender?")
        )
        self.assertEqual(
            inst.item[1].item[1].answer[0].valueDate.date, FHIRDate("1960-03-13").date
        )
        self.assertEqual(
            inst.item[1].item[1].answer[0].valueDate.as_json(), "1960-03-13"
        )
        self.assertEqual(force_bytes(inst.item[1].item[1].linkId), force_bytes("2.2"))
        self.assertEqual(
            force_bytes(inst.item[1].item[1].text),
            force_bytes("What is your date of birth?"),
        )
        self.assertEqual(
            force_bytes(inst.item[1].item[2].answer[0].valueString),
            force_bytes("The Netherlands"),
        )
        self.assertEqual(force_bytes(inst.item[1].item[2].linkId), force_bytes("2.3"))
        self.assertEqual(
            force_bytes(inst.item[1].item[2].text),
            force_bytes("What is your country of birth?"),
        )
        self.assertEqual(
            force_bytes(inst.item[1].item[3].answer[0].valueString),
            force_bytes("married"),
        )
        self.assertEqual(force_bytes(inst.item[1].item[3].linkId), force_bytes("2.4"))
        self.assertEqual(
            force_bytes(inst.item[1].item[3].text),
            force_bytes("What is your marital status?"),
        )
        self.assertEqual(force_bytes(inst.item[1].linkId), force_bytes("2"))
        self.assertEqual(
            force_bytes(inst.item[1].text), force_bytes("General questions")
        )
        # Group 3: intoxications (smoking, alcohol)
        self.assertEqual(
            force_bytes(inst.item[2].item[0].answer[0].valueString), force_bytes("No")
        )
        self.assertEqual(force_bytes(inst.item[2].item[0].linkId), force_bytes("3.1"))
        self.assertEqual(
            force_bytes(inst.item[2].item[0].text), force_bytes("Do you smoke?")
        )
        self.assertEqual(
            force_bytes(inst.item[2].item[1].answer[0].valueString),
            force_bytes("No, but I used to drink"),
        )
        self.assertEqual(force_bytes(inst.item[2].item[1].linkId), force_bytes("3.2"))
        self.assertEqual(
            force_bytes(inst.item[2].item[1].text),
            # NOTE(review): "alchohol" presumably mirrors the example fixture's
            # own spelling — do not "fix" without changing the fixture too.
            force_bytes("Do you drink alchohol?"),
        )
        self.assertEqual(force_bytes(inst.item[2].linkId), force_bytes("3"))
        self.assertEqual(force_bytes(inst.item[2].text), force_bytes("Intoxications"))
        self.assertEqual(force_bytes(inst.status), force_bytes("completed"))
        self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
def testQuestionnaireResponse4(self):
inst = self.instantiate_from("questionnaireresponse-example-bluebook.json")
self.assertIsNotNone(
inst, "Must have instantiated a QuestionnaireResponse instance"
)
self.implQuestionnaireResponse4(inst)
js = inst.as_json()
self.assertEqual("QuestionnaireResponse", js["resourceType"])
inst2 = questionnaireresponse.QuestionnaireResponse(js)
self.implQuestionnaireResponse4(inst2)
    def implQuestionnaireResponse4(self, inst):
        """Assert every expected field value of the 'bluebook' (bb) example.

        ``inst`` is a parsed QuestionnaireResponse describing a newborn's
        birth details; ``force_bytes`` keeps string comparisons portable
        across Python 2 and 3.
        """
        self.assertEqual(inst.authored.date, FHIRDate("2013-02-19T14:15:00+10:00").date)
        self.assertEqual(inst.authored.as_json(), "2013-02-19T14:15:00+10:00")
        self.assertEqual(force_bytes(inst.id), force_bytes("bb"))
        # "group": child's name and sex
        self.assertEqual(
            force_bytes(inst.item[0].item[0].item[0].answer[0].valueString),
            force_bytes("Cathy Jones"),
        )
        self.assertEqual(
            force_bytes(inst.item[0].item[0].item[0].linkId), force_bytes("nameOfChild")
        )
        self.assertEqual(
            force_bytes(inst.item[0].item[0].item[0].text), force_bytes("Name of child")
        )
        self.assertEqual(
            force_bytes(inst.item[0].item[0].item[1].answer[0].valueCoding.code),
            force_bytes("f"),
        )
        self.assertEqual(
            force_bytes(inst.item[0].item[0].item[1].linkId), force_bytes("sex")
        )
        self.assertEqual(
            force_bytes(inst.item[0].item[0].item[1].text), force_bytes("Sex")
        )
        self.assertEqual(force_bytes(inst.item[0].item[0].linkId), force_bytes("group"))
        # "neonatalInformation": weight, length, vitamin K, hep B, abnormalities
        self.assertEqual(inst.item[0].item[1].item[0].answer[0].valueDecimal, 3.25)
        self.assertEqual(
            force_bytes(inst.item[0].item[1].item[0].linkId), force_bytes("birthWeight")
        )
        self.assertEqual(
            force_bytes(inst.item[0].item[1].item[0].text),
            force_bytes("Birth weight (kg)"),
        )
        self.assertEqual(inst.item[0].item[1].item[1].answer[0].valueDecimal, 44.3)
        self.assertEqual(
            force_bytes(inst.item[0].item[1].item[1].linkId), force_bytes("birthLength")
        )
        self.assertEqual(
            force_bytes(inst.item[0].item[1].item[1].text),
            force_bytes("Birth length (cm)"),
        )
        # Vitamin K: two dose dates nested under the "given" answer
        self.assertEqual(
            inst.item[0]
            .item[1]
            .item[2]
            .answer[0]
            .item[0]
            .item[0]
            .answer[0]
            .valueDate.date,
            FHIRDate("1972-11-30").date,
        )
        self.assertEqual(
            inst.item[0]
            .item[1]
            .item[2]
            .answer[0]
            .item[0]
            .item[0]
            .answer[0]
            .valueDate.as_json(),
            "1972-11-30",
        )
        self.assertEqual(
            force_bytes(inst.item[0].item[1].item[2].answer[0].item[0].item[0].linkId),
            force_bytes("vitaminKDose1"),
        )
        self.assertEqual(
            force_bytes(inst.item[0].item[1].item[2].answer[0].item[0].item[0].text),
            force_bytes("1st dose"),
        )
        self.assertEqual(
            inst.item[0]
            .item[1]
            .item[2]
            .answer[0]
            .item[0]
            .item[1]
            .answer[0]
            .valueDate.date,
            FHIRDate("1972-12-11").date,
        )
        self.assertEqual(
            inst.item[0]
            .item[1]
            .item[2]
            .answer[0]
            .item[0]
            .item[1]
            .answer[0]
            .valueDate.as_json(),
            "1972-12-11",
        )
        self.assertEqual(
            force_bytes(inst.item[0].item[1].item[2].answer[0].item[0].item[1].linkId),
            force_bytes("vitaminKDose2"),
        )
        self.assertEqual(
            force_bytes(inst.item[0].item[1].item[2].answer[0].item[0].item[1].text),
            force_bytes("2nd dose"),
        )
        self.assertEqual(
            force_bytes(inst.item[0].item[1].item[2].answer[0].item[0].linkId),
            force_bytes("vitaminKgivenDoses"),
        )
        self.assertEqual(
            force_bytes(inst.item[0].item[1].item[2].answer[0].valueCoding.code),
            force_bytes("INJECTION"),
        )
        self.assertEqual(
            force_bytes(inst.item[0].item[1].item[2].linkId),
            force_bytes("vitaminKgiven"),
        )
        self.assertEqual(
            force_bytes(inst.item[0].item[1].item[2].text),
            force_bytes("Vitamin K given"),
        )
        # Hepatitis B: boolean "given" answer with a nested date
        self.assertEqual(
            inst.item[0].item[1].item[3].answer[0].item[0].answer[0].valueDate.date,
            FHIRDate("1972-12-04").date,
        )
        self.assertEqual(
            inst.item[0]
            .item[1]
            .item[3]
            .answer[0]
            .item[0]
            .answer[0]
            .valueDate.as_json(),
            "1972-12-04",
        )
        self.assertEqual(
            force_bytes(inst.item[0].item[1].item[3].answer[0].item[0].linkId),
            force_bytes("hepBgivenDate"),
        )
        self.assertEqual(
            force_bytes(inst.item[0].item[1].item[3].answer[0].item[0].text),
            force_bytes("Date given"),
        )
        self.assertTrue(inst.item[0].item[1].item[3].answer[0].valueBoolean)
        self.assertEqual(
            force_bytes(inst.item[0].item[1].item[3].linkId), force_bytes("hepBgiven")
        )
        self.assertEqual(
            force_bytes(inst.item[0].item[1].item[3].text),
            force_bytes("Hep B given y / n"),
        )
        self.assertEqual(
            force_bytes(inst.item[0].item[1].item[4].answer[0].valueString),
            force_bytes("Already able to speak Chinese"),
        )
        self.assertEqual(
            force_bytes(inst.item[0].item[1].item[4].linkId),
            force_bytes("abnormalitiesAtBirth"),
        )
        self.assertEqual(
            force_bytes(inst.item[0].item[1].item[4].text),
            force_bytes("Abnormalities noted at birth"),
        )
        self.assertEqual(
            force_bytes(inst.item[0].item[1].linkId), force_bytes("neonatalInformation")
        )
        self.assertEqual(
            force_bytes(inst.item[0].item[1].text), force_bytes("Neonatal Information")
        )
        self.assertEqual(force_bytes(inst.item[0].linkId), force_bytes("birthDetails"))
        self.assertEqual(
            force_bytes(inst.item[0].text),
            force_bytes("Birth details - To be completed by health professional"),
        )
        self.assertEqual(force_bytes(inst.status), force_bytes("completed"))
        self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
def testQuestionnaireResponse5(self):
inst = self.instantiate_from("questionnaireresponse-example-gcs.json")
self.assertIsNotNone(
inst, "Must have instantiated a QuestionnaireResponse instance"
)
self.implQuestionnaireResponse5(inst)
js = inst.as_json()
self.assertEqual("QuestionnaireResponse", js["resourceType"])
inst2 = questionnaireresponse.QuestionnaireResponse(js)
self.implQuestionnaireResponse5(inst2)
    def implQuestionnaireResponse5(self, inst):
        """Assert every expected field value of the Glasgow Coma Score (gcs)
        example.

        ``inst`` is a parsed QuestionnaireResponse with three LOINC-coded
        answers, each carrying an iso21090-CO-value extension holding the
        numeric score component.
        """
        self.assertEqual(inst.authored.date, FHIRDate("2014-12-11T04:44:16Z").date)
        self.assertEqual(inst.authored.as_json(), "2014-12-11T04:44:16Z")
        self.assertEqual(force_bytes(inst.id), force_bytes("gcs"))
        # 1.1: verbal response
        self.assertEqual(
            force_bytes(inst.item[0].answer[0].valueCoding.code),
            force_bytes("LA6560-2"),
        )
        self.assertEqual(
            force_bytes(inst.item[0].answer[0].valueCoding.display),
            force_bytes("Confused"),
        )
        self.assertEqual(
            force_bytes(inst.item[0].answer[0].valueCoding.extension[0].url),
            force_bytes("http://hl7.org/fhir/StructureDefinition/iso21090-CO-value"),
        )
        self.assertEqual(
            inst.item[0].answer[0].valueCoding.extension[0].valueDecimal, 4
        )
        self.assertEqual(
            force_bytes(inst.item[0].answer[0].valueCoding.system),
            force_bytes("http://loinc.org"),
        )
        self.assertEqual(force_bytes(inst.item[0].linkId), force_bytes("1.1"))
        # 1.2: motor response
        self.assertEqual(
            force_bytes(inst.item[1].answer[0].valueCoding.code),
            force_bytes("LA6566-9"),
        )
        self.assertEqual(
            force_bytes(inst.item[1].answer[0].valueCoding.display),
            force_bytes("Localizing pain"),
        )
        self.assertEqual(
            force_bytes(inst.item[1].answer[0].valueCoding.extension[0].url),
            force_bytes("http://hl7.org/fhir/StructureDefinition/iso21090-CO-value"),
        )
        self.assertEqual(
            inst.item[1].answer[0].valueCoding.extension[0].valueDecimal, 5
        )
        self.assertEqual(
            force_bytes(inst.item[1].answer[0].valueCoding.system),
            force_bytes("http://loinc.org"),
        )
        self.assertEqual(force_bytes(inst.item[1].linkId), force_bytes("1.2"))
        # 1.3: eye opening
        self.assertEqual(
            force_bytes(inst.item[2].answer[0].valueCoding.code),
            force_bytes("LA6556-0"),
        )
        self.assertEqual(
            force_bytes(inst.item[2].answer[0].valueCoding.display),
            force_bytes("Eyes open spontaneously"),
        )
        self.assertEqual(
            force_bytes(inst.item[2].answer[0].valueCoding.extension[0].url),
            force_bytes("http://hl7.org/fhir/StructureDefinition/iso21090-CO-value"),
        )
        self.assertEqual(
            inst.item[2].answer[0].valueCoding.extension[0].valueDecimal, 4
        )
        self.assertEqual(
            force_bytes(inst.item[2].answer[0].valueCoding.system),
            force_bytes("http://loinc.org"),
        )
        self.assertEqual(force_bytes(inst.item[2].linkId), force_bytes("1.3"))
        self.assertEqual(force_bytes(inst.status), force_bytes("completed"))
        self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
| 34.999705
| 88
| 0.499364
| 13,741
| 118,789
| 4.222255
| 0.027509
| 0.219759
| 0.102692
| 0.274484
| 0.956531
| 0.95136
| 0.944948
| 0.92866
| 0.916285
| 0.889276
| 0
| 0.058662
| 0.343895
| 118,789
| 3,393
| 89
| 35.010021
| 0.68575
| 0.001498
| 0
| 0.586033
| 0
| 0
| 0.08746
| 0.005202
| 0
| 0
| 0
| 0
| 0.20416
| 1
| 0.003269
| false
| 0
| 0.002377
| 0
| 0.006241
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7f0344393c1976aeb9e110ab6291801931c044fd
| 6,821
|
py
|
Python
|
loldib/getratings/models/NA/na_kassadin/na_kassadin_top.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_kassadin/na_kassadin_top.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_kassadin/na_kassadin_top.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
# Auto-generated matchup-rating placeholders for Kassadin (Top lane, NA region).
# Each empty subclass exists only to give `Ratings` a distinct, importable name
# per opposing champion; all behavior lives in the shared `Ratings` base class.
from getratings.models.ratings import Ratings

class NA_Kassadin_Top_Aatrox(Ratings):
    pass
class NA_Kassadin_Top_Ahri(Ratings):
    pass
class NA_Kassadin_Top_Akali(Ratings):
    pass
class NA_Kassadin_Top_Alistar(Ratings):
    pass
class NA_Kassadin_Top_Amumu(Ratings):
    pass
class NA_Kassadin_Top_Anivia(Ratings):
    pass
class NA_Kassadin_Top_Annie(Ratings):
    pass
class NA_Kassadin_Top_Ashe(Ratings):
    pass
class NA_Kassadin_Top_AurelionSol(Ratings):
    pass
class NA_Kassadin_Top_Azir(Ratings):
    pass
class NA_Kassadin_Top_Bard(Ratings):
    pass
class NA_Kassadin_Top_Blitzcrank(Ratings):
    pass
class NA_Kassadin_Top_Brand(Ratings):
    pass
class NA_Kassadin_Top_Braum(Ratings):
    pass
class NA_Kassadin_Top_Caitlyn(Ratings):
    pass
class NA_Kassadin_Top_Camille(Ratings):
    pass
class NA_Kassadin_Top_Cassiopeia(Ratings):
    pass
class NA_Kassadin_Top_Chogath(Ratings):
    pass
class NA_Kassadin_Top_Corki(Ratings):
    pass
class NA_Kassadin_Top_Darius(Ratings):
    pass
class NA_Kassadin_Top_Diana(Ratings):
    pass
class NA_Kassadin_Top_Draven(Ratings):
    pass
class NA_Kassadin_Top_DrMundo(Ratings):
    pass
class NA_Kassadin_Top_Ekko(Ratings):
    pass
class NA_Kassadin_Top_Elise(Ratings):
    pass
class NA_Kassadin_Top_Evelynn(Ratings):
    pass
class NA_Kassadin_Top_Ezreal(Ratings):
    pass
class NA_Kassadin_Top_Fiddlesticks(Ratings):
    pass
class NA_Kassadin_Top_Fiora(Ratings):
    pass
class NA_Kassadin_Top_Fizz(Ratings):
    pass
class NA_Kassadin_Top_Galio(Ratings):
    pass
class NA_Kassadin_Top_Gangplank(Ratings):
    pass
class NA_Kassadin_Top_Garen(Ratings):
    pass
class NA_Kassadin_Top_Gnar(Ratings):
    pass
class NA_Kassadin_Top_Gragas(Ratings):
    pass
class NA_Kassadin_Top_Graves(Ratings):
    pass
class NA_Kassadin_Top_Hecarim(Ratings):
    pass
class NA_Kassadin_Top_Heimerdinger(Ratings):
    pass
class NA_Kassadin_Top_Illaoi(Ratings):
    pass
class NA_Kassadin_Top_Irelia(Ratings):
    pass
class NA_Kassadin_Top_Ivern(Ratings):
    pass
class NA_Kassadin_Top_Janna(Ratings):
    pass
class NA_Kassadin_Top_JarvanIV(Ratings):
    pass
class NA_Kassadin_Top_Jax(Ratings):
    pass
class NA_Kassadin_Top_Jayce(Ratings):
    pass
class NA_Kassadin_Top_Jhin(Ratings):
    pass
class NA_Kassadin_Top_Jinx(Ratings):
    pass
class NA_Kassadin_Top_Kalista(Ratings):
    pass
class NA_Kassadin_Top_Karma(Ratings):
    pass
class NA_Kassadin_Top_Karthus(Ratings):
    pass
class NA_Kassadin_Top_Kassadin(Ratings):
    pass
class NA_Kassadin_Top_Katarina(Ratings):
    pass
class NA_Kassadin_Top_Kayle(Ratings):
    pass
class NA_Kassadin_Top_Kayn(Ratings):
    pass
class NA_Kassadin_Top_Kennen(Ratings):
    pass
class NA_Kassadin_Top_Khazix(Ratings):
    pass
class NA_Kassadin_Top_Kindred(Ratings):
    pass
class NA_Kassadin_Top_Kled(Ratings):
    pass
class NA_Kassadin_Top_KogMaw(Ratings):
    pass
class NA_Kassadin_Top_Leblanc(Ratings):
    pass
class NA_Kassadin_Top_LeeSin(Ratings):
    pass
class NA_Kassadin_Top_Leona(Ratings):
    pass
class NA_Kassadin_Top_Lissandra(Ratings):
    pass
class NA_Kassadin_Top_Lucian(Ratings):
    pass
class NA_Kassadin_Top_Lulu(Ratings):
    pass
class NA_Kassadin_Top_Lux(Ratings):
    pass
class NA_Kassadin_Top_Malphite(Ratings):
    pass
class NA_Kassadin_Top_Malzahar(Ratings):
    pass
class NA_Kassadin_Top_Maokai(Ratings):
    pass
class NA_Kassadin_Top_MasterYi(Ratings):
    pass
class NA_Kassadin_Top_MissFortune(Ratings):
    pass
class NA_Kassadin_Top_MonkeyKing(Ratings):
    pass
class NA_Kassadin_Top_Mordekaiser(Ratings):
    pass
class NA_Kassadin_Top_Morgana(Ratings):
    pass
class NA_Kassadin_Top_Nami(Ratings):
    pass
class NA_Kassadin_Top_Nasus(Ratings):
    pass
class NA_Kassadin_Top_Nautilus(Ratings):
    pass
class NA_Kassadin_Top_Nidalee(Ratings):
    pass
class NA_Kassadin_Top_Nocturne(Ratings):
    pass
class NA_Kassadin_Top_Nunu(Ratings):
    pass
class NA_Kassadin_Top_Olaf(Ratings):
    pass
class NA_Kassadin_Top_Orianna(Ratings):
    pass
class NA_Kassadin_Top_Ornn(Ratings):
    pass
class NA_Kassadin_Top_Pantheon(Ratings):
    pass
class NA_Kassadin_Top_Poppy(Ratings):
    pass
class NA_Kassadin_Top_Quinn(Ratings):
    pass
class NA_Kassadin_Top_Rakan(Ratings):
    pass
class NA_Kassadin_Top_Rammus(Ratings):
    pass
class NA_Kassadin_Top_RekSai(Ratings):
    pass
class NA_Kassadin_Top_Renekton(Ratings):
    pass
class NA_Kassadin_Top_Rengar(Ratings):
    pass
class NA_Kassadin_Top_Riven(Ratings):
    pass
class NA_Kassadin_Top_Rumble(Ratings):
    pass
class NA_Kassadin_Top_Ryze(Ratings):
    pass
class NA_Kassadin_Top_Sejuani(Ratings):
    pass
class NA_Kassadin_Top_Shaco(Ratings):
    pass
class NA_Kassadin_Top_Shen(Ratings):
    pass
class NA_Kassadin_Top_Shyvana(Ratings):
    pass
class NA_Kassadin_Top_Singed(Ratings):
    pass
class NA_Kassadin_Top_Sion(Ratings):
    pass
class NA_Kassadin_Top_Sivir(Ratings):
    pass
class NA_Kassadin_Top_Skarner(Ratings):
    pass
class NA_Kassadin_Top_Sona(Ratings):
    pass
class NA_Kassadin_Top_Soraka(Ratings):
    pass
class NA_Kassadin_Top_Swain(Ratings):
    pass
class NA_Kassadin_Top_Syndra(Ratings):
    pass
class NA_Kassadin_Top_TahmKench(Ratings):
    pass
class NA_Kassadin_Top_Taliyah(Ratings):
    pass
class NA_Kassadin_Top_Talon(Ratings):
    pass
class NA_Kassadin_Top_Taric(Ratings):
    pass
class NA_Kassadin_Top_Teemo(Ratings):
    pass
class NA_Kassadin_Top_Thresh(Ratings):
    pass
class NA_Kassadin_Top_Tristana(Ratings):
    pass
class NA_Kassadin_Top_Trundle(Ratings):
    pass
class NA_Kassadin_Top_Tryndamere(Ratings):
    pass
class NA_Kassadin_Top_TwistedFate(Ratings):
    pass
class NA_Kassadin_Top_Twitch(Ratings):
    pass
class NA_Kassadin_Top_Udyr(Ratings):
    pass
class NA_Kassadin_Top_Urgot(Ratings):
    pass
class NA_Kassadin_Top_Varus(Ratings):
    pass
class NA_Kassadin_Top_Vayne(Ratings):
    pass
class NA_Kassadin_Top_Veigar(Ratings):
    pass
class NA_Kassadin_Top_Velkoz(Ratings):
    pass
class NA_Kassadin_Top_Vi(Ratings):
    pass
class NA_Kassadin_Top_Viktor(Ratings):
    pass
class NA_Kassadin_Top_Vladimir(Ratings):
    pass
class NA_Kassadin_Top_Volibear(Ratings):
    pass
class NA_Kassadin_Top_Warwick(Ratings):
    pass
class NA_Kassadin_Top_Xayah(Ratings):
    pass
class NA_Kassadin_Top_Xerath(Ratings):
    pass
class NA_Kassadin_Top_XinZhao(Ratings):
    pass
class NA_Kassadin_Top_Yasuo(Ratings):
    pass
class NA_Kassadin_Top_Yorick(Ratings):
    pass
class NA_Kassadin_Top_Zac(Ratings):
    pass
class NA_Kassadin_Top_Zed(Ratings):
    pass
class NA_Kassadin_Top_Ziggs(Ratings):
    pass
class NA_Kassadin_Top_Zilean(Ratings):
    pass
class NA_Kassadin_Top_Zyra(Ratings):
    pass
| 16.357314
| 46
| 0.776133
| 972
| 6,821
| 5.020576
| 0.151235
| 0.197951
| 0.42418
| 0.509016
| 0.814139
| 0.814139
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162879
| 6,821
| 416
| 47
| 16.396635
| 0.854641
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
7f0ab917aa6f2e32c72185168d943bc515d1a8fa
| 8,769
|
py
|
Python
|
tests/rules/test_operator.py
|
chuxuantinh/vakt
|
30be4880e666a0f5dbe4dd1501870bae54822a5b
|
[
"Apache-2.0"
] | 132
|
2018-10-29T14:58:26.000Z
|
2022-03-04T10:43:26.000Z
|
tests/rules/test_operator.py
|
chuxuantinh/vakt
|
30be4880e666a0f5dbe4dd1501870bae54822a5b
|
[
"Apache-2.0"
] | 42
|
2018-08-31T10:41:43.000Z
|
2021-08-01T08:19:06.000Z
|
tests/rules/test_operator.py
|
chuxuantinh/vakt
|
30be4880e666a0f5dbe4dd1501870bae54822a5b
|
[
"Apache-2.0"
] | 22
|
2018-12-24T03:42:46.000Z
|
2022-03-16T04:42:25.000Z
|
from datetime import datetime as dt
import pytest
from vakt.rules.operator import *
from vakt.rules.base import Rule
class A:
    """Minimal fixture holding one attribute; keeps default identity equality."""

    def __init__(self, a):
        self.a = a


class B(A):
    """Fixture like :class:`A` but with value equality over all attributes."""

    def __eq__(self, other):
        # vars(x) is x.__dict__, so this is attribute-wise comparison.
        return vars(self) == vars(other)
# NOTE(review): per the rows below, tuples never satisfy Eq even when equal in
# Python — presumably the rule (or its JSON form) normalizes values; confirm
# against vakt.rules.operator before relying on tuple behavior.
@pytest.mark.parametrize('val, against, result', [
    ('', '', True),
    ('foo', 'foo', True),
    ('foo', 'fo0', False),
    ('русский', 'русский', True),
    ('русский', 'нет', False),
    (1, 1, True),
    (1.0, 1.0, True),
    (-1.0000008, -1.0000008, True),
    (50, 50.1, False),
    (50, 50.0, True),
    ('1', '1', True),
    ('1', 1, False),
    ((), (), False),
    ((1, 2), (1, 2), False),
    (['1', '2'], ['1', '2'], True),
    (['1', '2'], ['2', '1'], False),
    (['1', '2'], ['1', '3'], False),
    ({}, {}, True),
    ({'a': [1, 2]}, {'a': [1, 2]}, True),
    ({'a': [1, 2]}, {'a': [2, 1]}, False),
    ({'a': [1, 2]}, {'a': [1, 3]}, False),
    (A(1), A(1), False),
    (B(1), B(1), True),
    (A(1), A(2), False),
    (B(1), B(2), False),
    (dt.strptime("2018-01-21 02:37:21", "%Y-%m-%d %H:%M:%S"),
     dt.strptime("2018-01-21 02:37:21", "%Y-%m-%d %H:%M:%S"), True),
    (dt.strptime("2018-01-21 02:37:21", "%Y-%m-%d %H:%M:%S"),
     dt.strptime("2018-01-21 02:37:55", "%Y-%m-%d %H:%M:%S"), False),
])
def test_eq_satisfied(val, against, result):
    """Eq(val).satisfied(against) per the table, directly and after a JSON round-trip."""
    c = Eq(val)
    assert result == c.satisfied(against)
    # test after (de)serialization
    jsn = Eq(val).to_json()
    c1 = Rule.from_json(jsn)
    assert result == c1.satisfied(against)
# Exact negation of the Eq table: every `result` here is the inverse of the
# corresponding row in test_eq_satisfied.
@pytest.mark.parametrize('val, against, result', [
    ('', '', False),
    ('foo', 'foo', False),
    ('foo', 'fo0', True),
    ('русский', 'русский', False),
    ('русский', 'нет', True),
    (1, 1, False),
    (1.0, 1.0, False),
    (-1.0000008, -1.0000008, False),
    (50, 50.1, True),
    (50, 50.0, False),
    ('1', '1', False),
    ('1', 1, True),
    ((), (), True),
    ((1, 2), (1, 2), True),
    (['1', '2'], ['1', '2'], False),
    (['1', '2'], ['2', '1'], True),
    (['1', '2'], ['1', '3'], True),
    ({}, {}, False),
    ({'a': [1, 2]}, {'a': [1, 2]}, False),
    ({'a': [1, 2]}, {'a': [2, 1]}, True),
    ({'a': [1, 2]}, {'a': [1, 3]}, True),
    (A(1), A(1), True),
    (B(1), B(1), False),
    (A(1), A(2), True),
    (B(1), B(2), True),
    (dt.strptime("2018-01-21 02:37:21", "%Y-%m-%d %H:%M:%S"),
     dt.strptime("2018-01-21 02:37:21", "%Y-%m-%d %H:%M:%S"), False),
    (dt.strptime("2018-01-21 02:37:21", "%Y-%m-%d %H:%M:%S"),
     dt.strptime("2018-01-21 02:37:55", "%Y-%m-%d %H:%M:%S"), True),
])
def test_not_eq_satisfied(val, against, result):
    """NotEq(val).satisfied(against) per the table, directly and after a JSON round-trip."""
    c = NotEq(val)
    assert result == c.satisfied(against)
    # test after (de)serialization
    jsn = NotEq(val).to_json()
    c1 = Rule.from_json(jsn)
    assert result == c1.satisfied(against)
# Rows indicate Greater(val).satisfied(against) is True iff `against` compares
# strictly greater than `val` (e.g. (1, 10, True) but (1.0001, 1.0, False)).
@pytest.mark.parametrize('val, against, result', [
    ('', '', False),
    ('foo', 'foo', False),
    ('foo', 'fo0', False),
    ('русский', 'русский', False),
    ('русский', 'нет', False),
    ('один', 'один-два', True),
    (1, 1, False),
    (1, 10, True),
    (-30, -20, True),
    (1.0, 1.0, False),
    (1.0, 1.0001, True),
    (1.0001, 1.0, False),
    (50, 50.1, True),
    (50, 50.0, False),
    ('1', '1', False),
    ('1', '3', True),
    ('3', '1', False),
    ((), (), False),
    ((1, 2), (1, 2), False),
    ((1, 2), (1, 2, 3), True),
    ((1, 2, 3), (1, 2), False),
    (['1', '2'], ['1', '2'], False),
    (['1', '2'], ['1', '2', '3'], True),
    ([1, 2], [1, 2, 3], True),
    ([1, 2, 3], [1, 2], False),
    (['1', '2'], ['2', '1'], True),
    (['1', '2'], ['1', '3'], True),
    (['2', '1'], ['1', '2'], False),
    (dt.strptime("2018-01-21 02:37:00", "%Y-%m-%d %H:%M:%S"),
     dt.strptime("2018-01-21 02:37:00", "%Y-%m-%d %H:%M:%S"), False),
    (dt.strptime("2018-01-21 02:37:00", "%Y-%m-%d %H:%M:%S"),
     dt.strptime("2018-01-21 02:37:59", "%Y-%m-%d %H:%M:%S"), True),
    (dt.strptime("2018-01-21 02:37:59", "%Y-%m-%d %H:%M:%S"),
     dt.strptime("2018-01-21 02:37:00", "%Y-%m-%d %H:%M:%S"), False),
])
def test_greater_satisfied(val, against, result):
    """Greater(val).satisfied(against) per the table, directly and after a JSON round-trip."""
    c = Greater(val)
    assert result == c.satisfied(against)
    # test after (de)serialization
    jsn = Greater(val).to_json()
    c1 = Rule.from_json(jsn)
    assert result == c1.satisfied(against)
# Mirror of the Greater table: True iff `against` compares strictly less than
# `val` (e.g. (1.0001, 1.0, True) but (1, 10, False)).
@pytest.mark.parametrize('val, against, result', [
    ('', '', False),
    ('foo', 'foo', False),
    ('foo', 'fo0', True),
    ('русский', 'русский', False),
    ('русский', 'нет', True),
    ('один', 'один-два', False),
    (1, 1, False),
    (1, 10, False),
    (-30, -20, False),
    (1.0, 1.0, False),
    (1.0, 1.0001, False),
    (1.0001, 1.0, True),
    (50, 50.1, False),
    (50, 50.0, False),
    ('1', '1', False),
    ('1', '3', False),
    ('3', '1', True),
    ((), (), False),
    ((1, 2), (1, 2), False),
    ((1, 2), (1, 2, 3), False),
    ((1, 2, 3), (1, 2), True),
    (['1', '2'], ['1', '2'], False),
    (['1', '2'], ['1', '2', '3'], False),
    ([1, 2], [1, 2, 3], False),
    ([1, 2, 3], [1, 2], True),
    (['1', '2'], ['2', '1'], False),
    (['1', '2'], ['1', '3'], False),
    (['2', '1'], ['1', '2'], True),
    (dt.strptime("2018-01-21 02:37:00", "%Y-%m-%d %H:%M:%S"),
     dt.strptime("2018-01-21 02:37:00", "%Y-%m-%d %H:%M:%S"), False),
    (dt.strptime("2018-01-21 02:37:00", "%Y-%m-%d %H:%M:%S"),
     dt.strptime("2018-01-21 02:37:59", "%Y-%m-%d %H:%M:%S"), False),
    (dt.strptime("2018-01-21 02:37:59", "%Y-%m-%d %H:%M:%S"),
     dt.strptime("2018-01-21 02:37:00", "%Y-%m-%d %H:%M:%S"), True),
])
def test_less_satisfied(val, against, result):
    """Less(val).satisfied(against) per the table, directly and after a JSON round-trip."""
    c = Less(val)
    assert result == c.satisfied(against)
    # test after (de)serialization
    jsn = Less(val).to_json()
    c1 = Rule.from_json(jsn)
    assert result == c1.satisfied(against)
# Same inputs as the Greater table, with the equal-value rows flipped to True.
@pytest.mark.parametrize('val, against, result', [
    ('', '', True),
    ('foo', 'foo', True),
    ('foo', 'fo0', False),
    ('русский', 'русский', True),
    ('русский', 'нет', False),
    ('один', 'один-два', True),
    (1, 1, True),
    (1, 10, True),
    (-30, -20, True),
    (1.0, 1.0, True),
    (1.0, 1.0001, True),
    (1.0001, 1.0, False),
    (50, 50.1, True),
    (50, 50.0, True),
    ('1', '1', True),
    ('1', '3', True),
    ('3', '1', False),
    ((), (), True),
    ((1, 2), (1, 2), True),
    ((1, 2), (1, 2, 3), True),
    ((1, 2, 3), (1, 2), False),
    (['1', '2'], ['1', '2'], True),
    (['1', '2'], ['1', '2', '3'], True),
    ([1, 2], [1, 2, 3], True),
    ([1, 2, 3], [1, 2], False),
    (['1', '2'], ['2', '1'], True),
    (['1', '2'], ['1', '3'], True),
    (['2', '1'], ['1', '2'], False),
    (dt.strptime("2018-01-21 02:37:00", "%Y-%m-%d %H:%M:%S"),
     dt.strptime("2018-01-21 02:37:00", "%Y-%m-%d %H:%M:%S"), True),
    (dt.strptime("2018-01-21 02:37:00", "%Y-%m-%d %H:%M:%S"),
     dt.strptime("2018-01-21 02:37:59", "%Y-%m-%d %H:%M:%S"), True),
    (dt.strptime("2018-01-21 02:37:59", "%Y-%m-%d %H:%M:%S"),
     dt.strptime("2018-01-21 02:37:00", "%Y-%m-%d %H:%M:%S"), False),
])
def test_greater_or_equal_satisfied(val, against, result):
    """GreaterOrEqual(val).satisfied(against) per the table, directly and after a JSON round-trip."""
    c = GreaterOrEqual(val)
    assert result == c.satisfied(against)
    # test after (de)serialization
    jsn = GreaterOrEqual(val).to_json()
    c1 = Rule.from_json(jsn)
    assert result == c1.satisfied(against)
# Same inputs as the Less table, with the equal-value rows flipped to True.
@pytest.mark.parametrize('val, against, result', [
    ('', '', True),
    ('foo', 'foo', True),
    ('foo', 'fo0', True),
    ('русский', 'русский', True),
    ('русский', 'нет', True),
    ('один', 'один-два', False),
    (1, 1, True),
    (1, 10, False),
    (-30, -20, False),
    (1.0, 1.0, True),
    (1.0, 1.0001, False),
    (1.0001, 1.0, True),
    (50, 50.1, False),
    (50, 50.0, True),
    ('1', '1', True),
    ('1', '3', False),
    ('3', '1', True),
    ((), (), True),
    ((1, 2), (1, 2), True),
    ((1, 2), (1, 2, 3), False),
    ((1, 2, 3), (1, 2), True),
    (['1', '2'], ['1', '2'], True),
    (['1', '2'], ['1', '2', '3'], False),
    ([1, 2], [1, 2, 3], False),
    ([1, 2, 3], [1, 2], True),
    (['1', '2'], ['2', '1'], False),
    (['1', '2'], ['1', '3'], False),
    (['2', '1'], ['1', '2'], True),
    (dt.strptime("2018-01-21 02:37:00", "%Y-%m-%d %H:%M:%S"),
     dt.strptime("2018-01-21 02:37:00", "%Y-%m-%d %H:%M:%S"), True),
    (dt.strptime("2018-01-21 02:37:00", "%Y-%m-%d %H:%M:%S"),
     dt.strptime("2018-01-21 02:37:59", "%Y-%m-%d %H:%M:%S"), False),
    (dt.strptime("2018-01-21 02:37:59", "%Y-%m-%d %H:%M:%S"),
     dt.strptime("2018-01-21 02:37:00", "%Y-%m-%d %H:%M:%S"), True),
])
def test_less_or_equal_satisfied(val, against, result):
    """LessOrEqual(val).satisfied(against) per the table, directly and after a JSON round-trip."""
    c = LessOrEqual(val)
    assert result == c.satisfied(against)
    # test after (de)serialization
    jsn = LessOrEqual(val).to_json()
    c1 = Rule.from_json(jsn)
    assert result == c1.satisfied(against)
| 31.65704
| 69
| 0.455354
| 1,411
| 8,769
| 2.798016
| 0.054571
| 0.04458
| 0.113475
| 0.129686
| 0.911601
| 0.878926
| 0.842705
| 0.818896
| 0.818896
| 0.775836
| 0
| 0.141304
| 0.223629
| 8,769
| 276
| 70
| 31.771739
| 0.438602
| 0.019729
| 0
| 0.793651
| 0
| 0
| 0.196298
| 0
| 0
| 0
| 0
| 0
| 0.047619
| 1
| 0.031746
| false
| 0
| 0.015873
| 0.003968
| 0.059524
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6194f73fe78bde5f63bf160eada13682e0b9dafe
| 21,249
|
py
|
Python
|
allfreqs/tests/constants.py
|
robertopreste/allfreqs
|
ec762259bf7bac1a1eac94bbcad3d31681e8a3b8
|
[
"MIT"
] | null | null | null |
allfreqs/tests/constants.py
|
robertopreste/allfreqs
|
ec762259bf7bac1a1eac94bbcad3d31681e8a3b8
|
[
"MIT"
] | 14
|
2019-10-18T12:46:07.000Z
|
2022-02-25T22:12:27.000Z
|
allfreqs/tests/constants.py
|
robertopreste/allfreqs
|
ec762259bf7bac1a1eac94bbcad3d31681e8a3b8
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
# Created by Roberto Preste
import os
import pandas as pd
# Root of the test fixture tree: the "data" directory next to this module.
DATADIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data")
TEST_CSV = os.path.join(DATADIR, "test.csv")
# Small synthetic multi-alignment fixtures (FASTA and CSV variants,
# with and without the reference sequence).
SAMPLE_MULTIALG_FASTA = os.path.join(DATADIR, "sample_multialg.fasta")
SAMPLE_MULTIALG_NOREF_FASTA = os.path.join(DATADIR,
                                           "sample_multialg_noref.fasta")
SAMPLE_REF_FASTA = os.path.join(DATADIR, "sample_ref.fasta")
SAMPLE_MULTIALG_CSV = os.path.join(DATADIR, "sample_multialg.csv")
SAMPLE_MULTIALG_NOREF_CSV = os.path.join(DATADIR, "sample_multialg_noref.csv")
SAMPLE_REF_CSV = os.path.join(DATADIR, "sample_ref.csv")
SAMPLE_FREQUENCIES = os.path.join(DATADIR, "sample_frequencies.csv")
SAMPLE_FREQUENCIES_AMB = os.path.join(DATADIR, "sample_frequencies_amb.csv")
# Real-dataset fixtures under data/real_datasets (X and L6 alignments).
REAL_ALG_X_FASTA = os.path.join(DATADIR, "real_datasets", "alg_X.fasta")
REAL_ALG_X_NOREF_FASTA = os.path.join(DATADIR,
                                      "real_datasets", "alg_X_noref.fasta")
REAL_ALG_X_DF = os.path.join(DATADIR, "real_datasets", "alg_X_df.csv")
REAL_ALG_L6_FASTA = os.path.join(DATADIR, "real_datasets", "alg_L6.fasta")
REAL_ALG_L6_NOREF_FASTA = os.path.join(DATADIR,
                                       "real_datasets", "alg_L6_noref.fasta")
REAL_ALG_L6_DF = os.path.join(DATADIR, "real_datasets", "alg_L6_df.csv")
REAL_RSRS_FASTA = os.path.join(DATADIR, "real_datasets", "RSRS.fasta")
REAL_X_FREQUENCIES = os.path.join(DATADIR,
                                  "real_datasets", "frequencies_X.csv")
REAL_L6_FREQUENCIES = os.path.join(DATADIR,
                                   "real_datasets", "frequencies_L6.csv")
def sample_sequences_df():
    """Build the toy alignment as a 5-sequence x 44-position DataFrame.

    Columns are "<position>_<reference base>" labels; each holds the base
    observed in seq1..seq5 at that position. The index is named "id".
    """
    columns = {
        "1.0_A": ["A", "A", "A", "A", "A"],
        "2.0_A": ["A", "A", "C", "A", "A"],
        "3.0_G": ["G", "G", "C", "A", "A"],
        "3.1_-": ["G", "G", "-", "-", "G"],
        "4.0_C": ["C", "C", "G", "C", "C"],
        "5.0_T": ["T", "C", "G", "C", "C"],
        "6.0_N": ["N", "T", "T", "C", "C"],
        "7.0_G": ["G", "T", "T", "T", "T"],
        "8.0_G": ["G", "G", "G", "T", "T"],
        "9.0_G": ["G", "G", "G", "G", "G"],
        "10.0_C": ["C", "C", "C", "C", "C"],
        "11.0_A": ["K", "A", "C", "C", "C"],
        "12.0_T": ["T", "G", "G", "G", "G"],
        "13.0_T": ["T", "T", "T", "T", "G"],
        "14.0_T": ["T", "G", "T", "T", "T"],
        "15.0_C": ["C", "C", "C", "A", "A"],
        "16.0_A": ["A", "A", "A", "C", "C"],
        "17.0_G": ["G", "G", "G", "G", "G"],
        "18.0_G": ["G", "G", "W", "Y", "C"],
        "19.0_G": ["G", "G", "G", "T", "T"],
        "20.0_T": ["T", "T", "T", "T", "T"],
        "21.0_G": ["G", "G", "A", "A", "A"],
        "22.0_A": ["A", "A", "C", "A", "A"],
        "23.0_G": ["G", "G", "A", "A", "A"],
        "24.0_C": ["C", "C", "G", "C", "C"],
        "25.0_C": ["C", "C", "G", "C", "C"],
        "26.0_C": ["C", "G", "T", "G", "B"],
        "27.0_G": ["G", "T", "T", "A", "T"],
        "28.0_G": ["G", "G", "G", "G", "T"],
        "29.0_G": ["G", "G", "G", "G", "G"],
        "30.0_C": ["C", "C", "C", "C", "C"],
        "31.0_A": ["A", "C", "C", "C", "C"],
        "32.0_A": ["A", "G", "G", "G", "G"],
        "33.0_T": ["T", "G", "T", "G", "G"],
        "34.0_A": ["A", "G", "T", "G", "T"],
        "35.0_C": ["C", "C", "C", "A", "A"],
        "36.0_A": ["A", "A", "A", "C", "C"],
        "37.0_G": ["G", "C", "G", "A", "G"],
        "38.0_G": ["G", "G", "G", "C", "C"],
        "39.0_G": ["G", "G", "G", "T", "T"],
        "39.1_-": ["-", "A", "C", "-", "-"],
        "40.0_T": ["T", "T", "T", "C", "T"],
        "41.0_A": ["A", "A", "A", "A", "A"],
        "42.0_T": ["T", "T", "A", "T", "A"]
    }
    frame = pd.DataFrame(columns)
    frame.index = ["seq1", "seq2", "seq3", "seq4", "seq5"]
    frame.index.name = "id"
    return frame
def sample_sequences_freqs():
    """Return expected per-position base frequencies (A/C/G/T/gap/oth).

    One row per alignment position; fractions are over five sequences, so
    values move in 0.2 steps. Appears to mirror the sample alignment fixture —
    verify against the data files if the alignment changes.
    """
    df = pd.DataFrame.from_dict(
        {0: {'position': '1.0_A', 'A': 1.0, 'C': 0.0, 'G': 0.0, 'T': 0.0,
             'gap': 0.0, 'oth': 0.0},
         1: {'position': '2.0_A', 'A': 0.8, 'C': 0.2, 'G': 0.0, 'T': 0.0,
             'gap': 0.0, 'oth': 0.0},
         2: {'position': '3.0_G', 'A': 0.4, 'C': 0.2, 'G': 0.4, 'T': 0.0,
             'gap': 0.0, 'oth': 0.0},
         3: {'position': '3.1_-', 'A': 0.0, 'C': 0.0, 'G': 0.6, 'T': 0.0,
             'gap': 0.4, 'oth': 0.0},
         4: {'position': '4.0_C', 'A': 0.0, 'C': 0.8, 'G': 0.2, 'T': 0.0,
             'gap': 0.0, 'oth': 0.0},
         5: {'position': '5.0_T', 'A': 0.0, 'C': 0.6, 'G': 0.2, 'T': 0.2,
             'gap': 0.0, 'oth': 0.0},
         6: {'position': '6.0_N', 'A': 0.0, 'C': 0.4, 'G': 0.0, 'T': 0.4,
             'gap': 0.0, 'oth': 0.2},
         7: {'position': '7.0_G', 'A': 0.0, 'C': 0.0, 'G': 0.2, 'T': 0.8,
             'gap': 0.0, 'oth': 0.0},
         8: {'position': '8.0_G', 'A': 0.0, 'C': 0.0, 'G': 0.6, 'T': 0.4,
             'gap': 0.0, 'oth': 0.0},
         9: {'position': '9.0_G', 'A': 0.0, 'C': 0.0, 'G': 1.0, 'T': 0.0,
             'gap': 0.0, 'oth': 0.0},
         10: {'position': '10.0_C', 'A': 0.0, 'C': 1.0, 'G': 0.0, 'T': 0.0,
              'gap': 0.0, 'oth': 0.0},
         11: {'position': '11.0_A', 'A': 0.2, 'C': 0.6, 'G': 0.0, 'T': 0.0,
              'gap': 0.0, 'oth': 0.2},
         12: {'position': '12.0_T', 'A': 0.0, 'C': 0.0, 'G': 0.8, 'T': 0.2,
              'gap': 0.0, 'oth': 0.0},
         13: {'position': '13.0_T', 'A': 0.0, 'C': 0.0, 'G': 0.2, 'T': 0.8,
              'gap': 0.0, 'oth': 0.0},
         14: {'position': '14.0_T', 'A': 0.0, 'C': 0.0, 'G': 0.2, 'T': 0.8,
              'gap': 0.0, 'oth': 0.0},
         15: {'position': '15.0_C', 'A': 0.4, 'C': 0.6, 'G': 0.0, 'T': 0.0,
              'gap': 0.0, 'oth': 0.0},
         16: {'position': '16.0_A', 'A': 0.6, 'C': 0.4, 'G': 0.0, 'T': 0.0,
              'gap': 0.0, 'oth': 0.0},
         17: {'position': '17.0_G', 'A': 0.0, 'C': 0.0, 'G': 1.0, 'T': 0.0,
              'gap': 0.0, 'oth': 0.0},
         18: {'position': '18.0_G', 'A': 0.0, 'C': 0.2, 'G': 0.4, 'T': 0.0,
              'gap': 0.0, 'oth': 0.4},
         19: {'position': '19.0_G', 'A': 0.0, 'C': 0.0, 'G': 0.6, 'T': 0.4,
              'gap': 0.0, 'oth': 0.0},
         20: {'position': '20.0_T', 'A': 0.0, 'C': 0.0, 'G': 0.0, 'T': 1.0,
              'gap': 0.0, 'oth': 0.0},
         21: {'position': '21.0_G', 'A': 0.6, 'C': 0.0, 'G': 0.4, 'T': 0.0,
              'gap': 0.0, 'oth': 0.0},
         22: {'position': '22.0_A', 'A': 0.8, 'C': 0.2, 'G': 0.0, 'T': 0.0,
              'gap': 0.0, 'oth': 0.0},
         23: {'position': '23.0_G', 'A': 0.6, 'C': 0.0, 'G': 0.4, 'T': 0.0,
              'gap': 0.0, 'oth': 0.0},
         24: {'position': '24.0_C', 'A': 0.0, 'C': 0.8, 'G': 0.2, 'T': 0.0,
              'gap': 0.0, 'oth': 0.0},
         25: {'position': '25.0_C', 'A': 0.0, 'C': 0.8, 'G': 0.2, 'T': 0.0,
              'gap': 0.0, 'oth': 0.0},
         26: {'position': '26.0_C', 'A': 0.0, 'C': 0.2, 'G': 0.4, 'T': 0.2,
              'gap': 0.0, 'oth': 0.2},
         27: {'position': '27.0_G', 'A': 0.2, 'C': 0.0, 'G': 0.2, 'T': 0.6,
              'gap': 0.0, 'oth': 0.0},
         28: {'position': '28.0_G', 'A': 0.0, 'C': 0.0, 'G': 0.8, 'T': 0.2,
              'gap': 0.0, 'oth': 0.0},
         29: {'position': '29.0_G', 'A': 0.0, 'C': 0.0, 'G': 1.0, 'T': 0.0,
              'gap': 0.0, 'oth': 0.0},
         30: {'position': '30.0_C', 'A': 0.0, 'C': 1.0, 'G': 0.0, 'T': 0.0,
              'gap': 0.0, 'oth': 0.0},
         31: {'position': '31.0_A', 'A': 0.2, 'C': 0.8, 'G': 0.0, 'T': 0.0,
              'gap': 0.0, 'oth': 0.0},
         32: {'position': '32.0_A', 'A': 0.2, 'C': 0.0, 'G': 0.8, 'T': 0.0,
              'gap': 0.0, 'oth': 0.0},
         33: {'position': '33.0_T', 'A': 0.0, 'C': 0.0, 'G': 0.6, 'T': 0.4,
              'gap': 0.0, 'oth': 0.0},
         34: {'position': '34.0_A', 'A': 0.2, 'C': 0.0, 'G': 0.4, 'T': 0.4,
              'gap': 0.0, 'oth': 0.0},
         35: {'position': '35.0_C', 'A': 0.4, 'C': 0.6, 'G': 0.0, 'T': 0.0,
              'gap': 0.0, 'oth': 0.0},
         36: {'position': '36.0_A', 'A': 0.6, 'C': 0.4, 'G': 0.0, 'T': 0.0,
              'gap': 0.0, 'oth': 0.0},
         37: {'position': '37.0_G', 'A': 0.2, 'C': 0.2, 'G': 0.6, 'T': 0.0,
              'gap': 0.0, 'oth': 0.0},
         38: {'position': '38.0_G', 'A': 0.0, 'C': 0.4, 'G': 0.6, 'T': 0.0,
              'gap': 0.0, 'oth': 0.0},
         39: {'position': '39.0_G', 'A': 0.0, 'C': 0.0, 'G': 0.6, 'T': 0.4,
              'gap': 0.0, 'oth': 0.0},
         40: {'position': '39.1_-', 'A': 0.2, 'C': 0.2, 'G': 0.0, 'T': 0.0,
              'gap': 0.6, 'oth': 0.0},
         41: {'position': '40.0_T', 'A': 0.0, 'C': 0.2, 'G': 0.0, 'T': 0.8,
              'gap': 0.0, 'oth': 0.0},
         42: {'position': '41.0_A', 'A': 1.0, 'C': 0.0, 'G': 0.0, 'T': 0.0,
              'gap': 0.0, 'oth': 0.0},
         43: {'position': '42.0_T', 'A': 0.4, 'C': 0.0, 'G': 0.0, 'T': 0.6,
              'gap': 0.0, 'oth': 0.0}},
        orient="index")
    return df
def sample_sequences_freqs_amb():
    """Return expected per-position frequencies with IUPAC ambiguity columns.

    Same positions as ``sample_sequences_freqs`` but each ambiguous code
    (R, Y, K, M, S, W, B, D, H, V, N) gets its own column instead of being
    folded into an "oth" bucket; "gap" counts '-' characters.
    """
    df = pd.DataFrame.from_dict({
        0: {'position': '1.0_A', 'A': 1.0, 'C': 0.0, 'G': 0.0, 'T': 0.0,
            'R': 0.0, 'Y': 0.0,
            'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
            'H': 0.0, 'V': 0.0,
            'N': 0.0, 'gap': 0.0},
        1: {'position': '2.0_A', 'A': 0.8, 'C': 0.2, 'G': 0.0, 'T': 0.0,
            'R': 0.0, 'Y': 0.0,
            'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
            'H': 0.0, 'V': 0.0,
            'N': 0.0, 'gap': 0.0},
        2: {'position': '3.0_G', 'A': 0.4, 'C': 0.2, 'G': 0.4, 'T': 0.0,
            'R': 0.0, 'Y': 0.0,
            'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
            'H': 0.0, 'V': 0.0,
            'N': 0.0, 'gap': 0.0},
        3: {'position': '3.1_-', 'A': 0.0, 'C': 0.0, 'G': 0.6, 'T': 0.0,
            'R': 0.0, 'Y': 0.0,
            'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
            'H': 0.0, 'V': 0.0,
            'N': 0.0, 'gap': 0.4},
        4: {'position': '4.0_C', 'A': 0.0, 'C': 0.8, 'G': 0.2, 'T': 0.0,
            'R': 0.0, 'Y': 0.0,
            'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
            'H': 0.0, 'V': 0.0,
            'N': 0.0, 'gap': 0.0},
        5: {'position': '5.0_T', 'A': 0.0, 'C': 0.6, 'G': 0.2, 'T': 0.2,
            'R': 0.0, 'Y': 0.0,
            'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
            'H': 0.0, 'V': 0.0,
            'N': 0.0, 'gap': 0.0},
        6: {'position': '6.0_N', 'A': 0.0, 'C': 0.4, 'G': 0.0, 'T': 0.4,
            'R': 0.0, 'Y': 0.0,
            'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
            'H': 0.0, 'V': 0.0,
            'N': 0.2, 'gap': 0.0},
        7: {'position': '7.0_G', 'A': 0.0, 'C': 0.0, 'G': 0.2, 'T': 0.8,
            'R': 0.0, 'Y': 0.0,
            'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
            'H': 0.0, 'V': 0.0,
            'N': 0.0, 'gap': 0.0},
        8: {'position': '8.0_G', 'A': 0.0, 'C': 0.0, 'G': 0.6, 'T': 0.4,
            'R': 0.0, 'Y': 0.0,
            'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
            'H': 0.0, 'V': 0.0,
            'N': 0.0, 'gap': 0.0},
        9: {'position': '9.0_G', 'A': 0.0, 'C': 0.0, 'G': 1.0, 'T': 0.0,
            'R': 0.0, 'Y': 0.0,
            'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
            'H': 0.0, 'V': 0.0,
            'N': 0.0, 'gap': 0.0},
        10: {'position': '10.0_C', 'A': 0.0, 'C': 1.0, 'G': 0.0, 'T': 0.0,
             'R': 0.0, 'Y': 0.0,
             'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
             'H': 0.0, 'V': 0.0,
             'N': 0.0, 'gap': 0.0},
        11: {'position': '11.0_A', 'A': 0.2, 'C': 0.6, 'G': 0.0, 'T': 0.0,
             'R': 0.0, 'Y': 0.0,
             'K': 0.2, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
             'H': 0.0, 'V': 0.0,
             'N': 0.0, 'gap': 0.0},
        12: {'position': '12.0_T', 'A': 0.0, 'C': 0.0, 'G': 0.8, 'T': 0.2,
             'R': 0.0, 'Y': 0.0,
             'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
             'H': 0.0, 'V': 0.0,
             'N': 0.0, 'gap': 0.0},
        13: {'position': '13.0_T', 'A': 0.0, 'C': 0.0, 'G': 0.2, 'T': 0.8,
             'R': 0.0, 'Y': 0.0,
             'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
             'H': 0.0, 'V': 0.0,
             'N': 0.0, 'gap': 0.0},
        14: {'position': '14.0_T', 'A': 0.0, 'C': 0.0, 'G': 0.2, 'T': 0.8,
             'R': 0.0, 'Y': 0.0,
             'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
             'H': 0.0, 'V': 0.0,
             'N': 0.0, 'gap': 0.0},
        15: {'position': '15.0_C', 'A': 0.4, 'C': 0.6, 'G': 0.0, 'T': 0.0,
             'R': 0.0, 'Y': 0.0,
             'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
             'H': 0.0, 'V': 0.0,
             'N': 0.0, 'gap': 0.0},
        16: {'position': '16.0_A', 'A': 0.6, 'C': 0.4, 'G': 0.0, 'T': 0.0,
             'R': 0.0, 'Y': 0.0,
             'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
             'H': 0.0, 'V': 0.0,
             'N': 0.0, 'gap': 0.0},
        17: {'position': '17.0_G', 'A': 0.0, 'C': 0.0, 'G': 1.0, 'T': 0.0,
             'R': 0.0, 'Y': 0.0,
             'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
             'H': 0.0, 'V': 0.0,
             'N': 0.0, 'gap': 0.0},
        18: {'position': '18.0_G', 'A': 0.0, 'C': 0.2, 'G': 0.4, 'T': 0.0,
             'R': 0.0, 'Y': 0.2,
             'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.2, 'B': 0.0, 'D': 0.0,
             'H': 0.0, 'V': 0.0,
             'N': 0.0, 'gap': 0.0},
        19: {'position': '19.0_G', 'A': 0.0, 'C': 0.0, 'G': 0.6, 'T': 0.4,
             'R': 0.0, 'Y': 0.0,
             'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
             'H': 0.0, 'V': 0.0,
             'N': 0.0, 'gap': 0.0},
        20: {'position': '20.0_T', 'A': 0.0, 'C': 0.0, 'G': 0.0, 'T': 1.0,
             'R': 0.0, 'Y': 0.0,
             'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
             'H': 0.0, 'V': 0.0,
             'N': 0.0, 'gap': 0.0},
        21: {'position': '21.0_G', 'A': 0.6, 'C': 0.0, 'G': 0.4, 'T': 0.0,
             'R': 0.0, 'Y': 0.0,
             'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
             'H': 0.0, 'V': 0.0,
             'N': 0.0, 'gap': 0.0},
        22: {'position': '22.0_A', 'A': 0.8, 'C': 0.2, 'G': 0.0, 'T': 0.0,
             'R': 0.0, 'Y': 0.0,
             'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
             'H': 0.0, 'V': 0.0,
             'N': 0.0, 'gap': 0.0},
        23: {'position': '23.0_G', 'A': 0.6, 'C': 0.0, 'G': 0.4, 'T': 0.0,
             'R': 0.0, 'Y': 0.0,
             'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
             'H': 0.0, 'V': 0.0,
             'N': 0.0, 'gap': 0.0},
        24: {'position': '24.0_C', 'A': 0.0, 'C': 0.8, 'G': 0.2, 'T': 0.0,
             'R': 0.0, 'Y': 0.0,
             'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
             'H': 0.0, 'V': 0.0,
             'N': 0.0, 'gap': 0.0},
        25: {'position': '25.0_C', 'A': 0.0, 'C': 0.8, 'G': 0.2, 'T': 0.0,
             'R': 0.0, 'Y': 0.0,
             'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
             'H': 0.0, 'V': 0.0,
             'N': 0.0, 'gap': 0.0},
        26: {'position': '26.0_C', 'A': 0.0, 'C': 0.2, 'G': 0.4, 'T': 0.2,
             'R': 0.0, 'Y': 0.0,
             'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.2, 'D': 0.0,
             'H': 0.0, 'V': 0.0,
             'N': 0.0, 'gap': 0.0},
        27: {'position': '27.0_G', 'A': 0.2, 'C': 0.0, 'G': 0.2, 'T': 0.6,
             'R': 0.0, 'Y': 0.0,
             'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
             'H': 0.0, 'V': 0.0,
             'N': 0.0, 'gap': 0.0},
        28: {'position': '28.0_G', 'A': 0.0, 'C': 0.0, 'G': 0.8, 'T': 0.2,
             'R': 0.0, 'Y': 0.0,
             'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
             'H': 0.0, 'V': 0.0,
             'N': 0.0, 'gap': 0.0},
        29: {'position': '29.0_G', 'A': 0.0, 'C': 0.0, 'G': 1.0, 'T': 0.0,
             'R': 0.0, 'Y': 0.0,
             'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
             'H': 0.0, 'V': 0.0,
             'N': 0.0, 'gap': 0.0},
        30: {'position': '30.0_C', 'A': 0.0, 'C': 1.0, 'G': 0.0, 'T': 0.0,
             'R': 0.0, 'Y': 0.0,
             'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
             'H': 0.0, 'V': 0.0,
             'N': 0.0, 'gap': 0.0},
        31: {'position': '31.0_A', 'A': 0.2, 'C': 0.8, 'G': 0.0, 'T': 0.0,
             'R': 0.0, 'Y': 0.0,
             'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
             'H': 0.0, 'V': 0.0,
             'N': 0.0, 'gap': 0.0},
        32: {'position': '32.0_A', 'A': 0.2, 'C': 0.0, 'G': 0.8, 'T': 0.0,
             'R': 0.0, 'Y': 0.0,
             'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
             'H': 0.0, 'V': 0.0,
             'N': 0.0, 'gap': 0.0},
        33: {'position': '33.0_T', 'A': 0.0, 'C': 0.0, 'G': 0.6, 'T': 0.4,
             'R': 0.0, 'Y': 0.0,
             'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
             'H': 0.0, 'V': 0.0,
             'N': 0.0, 'gap': 0.0},
        34: {'position': '34.0_A', 'A': 0.2, 'C': 0.0, 'G': 0.4, 'T': 0.4,
             'R': 0.0, 'Y': 0.0,
             'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
             'H': 0.0, 'V': 0.0,
             'N': 0.0, 'gap': 0.0},
        35: {'position': '35.0_C', 'A': 0.4, 'C': 0.6, 'G': 0.0, 'T': 0.0,
             'R': 0.0, 'Y': 0.0,
             'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
             'H': 0.0, 'V': 0.0,
             'N': 0.0, 'gap': 0.0},
        36: {'position': '36.0_A', 'A': 0.6, 'C': 0.4, 'G': 0.0, 'T': 0.0,
             'R': 0.0, 'Y': 0.0,
             'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
             'H': 0.0, 'V': 0.0,
             'N': 0.0, 'gap': 0.0},
        37: {'position': '37.0_G', 'A': 0.2, 'C': 0.2, 'G': 0.6, 'T': 0.0,
             'R': 0.0, 'Y': 0.0,
             'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
             'H': 0.0, 'V': 0.0,
             'N': 0.0, 'gap': 0.0},
        38: {'position': '38.0_G', 'A': 0.0, 'C': 0.4, 'G': 0.6, 'T': 0.0,
             'R': 0.0, 'Y': 0.0,
             'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
             'H': 0.0, 'V': 0.0,
             'N': 0.0, 'gap': 0.0},
        39: {'position': '39.0_G', 'A': 0.0, 'C': 0.0, 'G': 0.6, 'T': 0.4,
             'R': 0.0, 'Y': 0.0,
             'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
             'H': 0.0, 'V': 0.0,
             'N': 0.0, 'gap': 0.0},
        40: {'position': '39.1_-', 'A': 0.2, 'C': 0.2, 'G': 0.0, 'T': 0.0,
             'R': 0.0, 'Y': 0.0,
             'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
             'H': 0.0, 'V': 0.0,
             'N': 0.0, 'gap': 0.6},
        41: {'position': '40.0_T', 'A': 0.0, 'C': 0.2, 'G': 0.0, 'T': 0.8,
             'R': 0.0, 'Y': 0.0,
             'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
             'H': 0.0, 'V': 0.0,
             'N': 0.0, 'gap': 0.0},
        42: {'position': '41.0_A', 'A': 1.0, 'C': 0.0, 'G': 0.0, 'T': 0.0,
             'R': 0.0, 'Y': 0.0,
             'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
             'H': 0.0, 'V': 0.0,
             'N': 0.0, 'gap': 0.0},
        43: {'position': '42.0_T', 'A': 0.4, 'C': 0.0, 'G': 0.0, 'T': 0.6,
             'R': 0.0, 'Y': 0.0,
             'K': 0.0, 'M': 0.0, 'S': 0.0, 'W': 0.0, 'B': 0.0, 'D': 0.0,
             'H': 0.0, 'V': 0.0,
             'N': 0.0, 'gap': 0.0}},
        orient="index")
    return df
# The five aligned sample sequences keyed by id; '-' marks alignment gaps.
SAMPLE_SEQUENCES_DICT = {
    "seq1": "AAGGCTNGGGCKTTTCAGGGTGAGCCCGGGCAATACAGGG-TAT",
    "seq2": "AAGGCCTTGGCAGTGCAGGGTGAGCCGTGGCCGGGCACGGATAT",
    "seq3": "ACC-GGTTGGCCGTTCAGWGTACAGGTTGGCCGTTCAGGGCTAA",
    "seq4": "AAA-CCCTTGCCGTTACGYTTAAACCGAGGCCGGGACACT-CAT",
    "seq5": "AAAGCCCTTGCCGGTACGCTTAAACCBTTGCCGGTACGCT-TAA"
}
# Same sequences in tabular (id, sequence) form.
SAMPLE_SEQUENCES_TABMSA = pd.DataFrame({
    "id": ["seq1", "seq2", "seq3", "seq4", "seq5"],
    "sequence": ["AAGGCTNGGGCKTTTCAGGGTGAGCCCGGGCAATACAGGG-TAT",
                 "AAGGCCTTGGCAGTGCAGGGTGAGCCGTGGCCGGGCACGGATAT",
                 "ACC-GGTTGGCCGTTCAGWGTACAGGTTGGCCGTTCAGGGCTAA",
                 "AAA-CCCTTGCCGTTACGYTTAAACCGAGGCCGGGACACT-CAT",
                 "AAAGCCCTTGCCGGTACGCTTAAACCBTTGCCGGTACGCT-TAA"]
})
# Expected "<position>_<reference base>" index labels for the sample alignment.
SAMPLE_REFERENCE_INDEXES = [
    "1.0_A", "2.0_A", "3.0_G", "3.1_-", "4.0_C", "5.0_T", "6.0_N",
    "7.0_G", "8.0_G", "9.0_G", "10.0_C", "11.0_A", "12.0_T",
    "13.0_T", "14.0_T", "15.0_C", "16.0_A", "17.0_G", "18.0_G",
    "19.0_G", "20.0_T", "21.0_G", "22.0_A", "23.0_G", "24.0_C",
    "25.0_C", "26.0_C", "27.0_G", "28.0_G", "29.0_G", "30.0_C",
    "31.0_A", "32.0_A", "33.0_T", "34.0_A", "35.0_C", "36.0_A",
    "37.0_G", "38.0_G", "39.0_G", "39.1_-", "40.0_T", "41.0_A",
    "42.0_T"
]
| 48.074661
| 78
| 0.330698
| 4,147
| 21,249
| 1.628888
| 0.031348
| 0.23242
| 0.062176
| 0.062176
| 0.802073
| 0.782235
| 0.745818
| 0.695633
| 0.666765
| 0.635085
| 0
| 0.183032
| 0.330463
| 21,249
| 441
| 79
| 48.183673
| 0.291769
| 0.0032
| 0
| 0.726619
| 0
| 0
| 0.194589
| 0.02649
| 0
| 0
| 0
| 0
| 0
| 1
| 0.007194
| false
| 0
| 0.004796
| 0
| 0.019185
| 0
| 0
| 0
| 1
| null | 1
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
619defed5e84d004981966bcb11cdc9041c80022
| 7,964
|
py
|
Python
|
pyranet/models/dev/pyranet.py
|
accasio/3DPyraNet
|
19f8d453ac5d089b5c5514ea744d895e6f8aee14
|
[
"MIT"
] | null | null | null |
pyranet/models/dev/pyranet.py
|
accasio/3DPyraNet
|
19f8d453ac5d089b5c5514ea744d895e6f8aee14
|
[
"MIT"
] | null | null | null |
pyranet/models/dev/pyranet.py
|
accasio/3DPyraNet
|
19f8d453ac5d089b5c5514ea744d895e6f8aee14
|
[
"MIT"
] | 3
|
2019-09-11T10:01:29.000Z
|
2019-10-04T09:12:17.000Z
|
import tensorflow as tf
from ..pyranet import *
# Check formula
def ws3d_layer_output_shape_transposed(input_shape, rf=(3, 4, 4), strides=(1, 1, 1, 1, 1), padding="VALID"):
    """Compute the spatial output shape (depth, height, width) of a WS3D layer.

    :param input_shape: three spatial sizes (depth, height, width); coerced to float
    :param rf: receptive-field size per spatial dimension (r_l in the paper)
    :param strides: 5-element NDHWC stride vector; entries 1..3 are the spatial strides
    :param padding: "VALID" or "SAME" (case-insensitive)
    :return: (output_depth, output_height, output_width) as rounded floats
    :raises NotImplementedError: for any other padding string
    """
    padding = padding.upper()
    input_shape = list(map(float, input_shape))
    if padding == "VALID":
        # NOTE(review): for stride > 1 this divides (size - rf + 1) by the
        # stride, while the conventional formula is (size - rf) / stride + 1.
        # The two agree for stride == 1 -- confirm intent before changing.
        output_depth = np.round((input_shape[0] - rf[0] + 1.) / strides[1])
        output_height = np.round((input_shape[1] - rf[1] + 1.) / strides[2])
        output_width = np.round((input_shape[2] - rf[2] + 1.) / strides[3])
    elif padding == "SAME":
        # Fix: the original iterated `for i, s in zip(strides[1:-1], input_shape)`
        # and computed `s / strides[i]`, i.e. each stride *value* was reused as
        # an index into `strides` (correct only when all strides are 1).
        # Divide each spatial size by its own stride instead.
        output_depth, output_height, output_width = [
            np.round(size / stride)
            for stride, size in zip(strides[1:-1], input_shape)
        ]
    else:
        raise NotImplementedError("{} is not a valid padding type".format(padding))
    return output_depth, output_height, output_width
def padding_tansposed(in_size, stride, padding, kernel_size):
    # NOTE(review): this function appears unfinished -- `stride` and `padding`
    # are never used, both locals are discarded, and it implicitly returns
    # None.  The name also looks like a typo for "padding_transposed".
    # No caller is visible in this file; confirm whether it can be removed.
    padding_t = kernel_size - 0
    output_size = in_size + (kernel_size - 1)
def padding_size(output_size, stride, kernel_size, in_size):
    """Return [before, after] padding amounts for one spatial dimension.

    Computes the total padding needed so that a ``kernel_size`` window moved
    with ``stride`` over the padded input of length ``in_size`` yields
    ``output_size`` positions.  The total is split evenly; when it is odd the
    extra unit goes on the trailing side.  Negative totals clamp to [0, 0].

    :param output_size: desired number of output positions
    :param stride: stride of the sliding window
    :param kernel_size: receptive-field size along this dimension
    :param in_size: unpadded input size along this dimension
    :return: two-element list [pad_before, pad_after] of non-negative ints
    """
    total = (output_size - 1) * stride + kernel_size - in_size
    # Floor division keeps the amounts integral: tf.pad (the consumer in this
    # file) rejects floats, and the original `x / 2` produced floats under
    # Python 3 while floor-dividing under Python 2.
    side = total // 2
    side = 0 if side < 0 else side
    return [side, side] if total % 2 == 0 or total < 0 else [side, side + 1]
def _ws3d_same_padding(input_tensor, weights, rf=(3, 4, 4), strides=(1, 1, 1, 1, 1),
                       padding="VALID", data_format="NDHWC", name="ws3d"):
    """
    Weighted-sum 3D (PyraNet) layer supporting SAME padding.

    Implements the weighting as an element-wise multiply of each depth slab by
    the weight tensor, followed by an all-ones conv3d that sums over the
    receptive field (a cross-correlation with constant kernel).

    :param input_tensor: 5-D input tensor; layout given by `data_format`
    :param weights: weight tensor whose last axis indexes output feature maps
    :param rf: a list containing the receptive field sizes [height, width], it is the r_l in the original paper
    :param strides: overlap of receptive fields [1, stride_h, stride_w, 1], it is the o_l in the original paper
    :param padding: "VALID" or "SAME" -- NOTE(review): the original docstring
        claimed only VALID is admitted, but SAME is explicitly handled below
    :param data_format: "NDHWC" (default) or a channel-first layout
    :param name: TensorFlow name scope for the created ops
    :return: channels-last output tensor after squeezing singleton axes
    """
    with tf.name_scope(name):
        input_shape = list(input_tensor.shape)
        if data_format == "NDHWC":
            n, d, h, w, c = list(map(int, input_shape))
        else:
            # Assumed channel-first (NCDHW) when not NDHWC -- TODO confirm.
            n, c, d, h, w = list(map(int, input_shape))
        out_channels = int(weights.shape[-1])
        # ws3d_layer_output_shape_new comes from the `from ..pyranet import *`
        # star import at the top of this file.
        output_depth, output_height, output_width = list(map(int,
                                                             ws3d_layer_output_shape_new((d, h, w), rf=rf, strides=strides,
                                                                                         padding=padding)))
        # print output_depth, output_height, output_width
        # Per-dimension [before, after] padding needed to realise SAME output.
        d_pad_list = padding_size(output_depth, strides[1], rf[0], d)
        h_pad_list = padding_size(output_height, strides[2], rf[1], h)
        w_pad_list = padding_size(output_width, strides[3], rf[2], w)
        # print d_pad_list, h_pad_list, w_pad_list
        if padding.upper() == "SAME":
            # Depth is padded up-front; height/width are padded per-slab below.
            input_tensor = tf.pad(input_tensor, paddings=[[0, 0], d_pad_list, [0, 0], [0, 0], [0, 0]])
        # pad also weights? it helps to avoid multiple padding in feature maps loop
        correlation = []
        # Bias and conv need to be intern to weighting
        with tf.name_scope("Input_Weighting_Op"):
            # All-ones kernel: the conv3d below only sums over the receptive
            # field; the actual weighting happened in the multiply above it.
            conv_weights = tf.constant(1.0, tf.float32, shape=(rf[0], rf[1], rf[2], c, 1),
                                       name="{}/conv_kernel".format(name))
            for fm in range(out_channels):
                assign_ops = []
                for cd in range(output_depth):
                    s = cd * strides[1]
                    # Weight one depth slab of rf[0] frames by this feature
                    # map's weights.
                    out_mul = tf.multiply(input_tensor[:, s:s + rf[0], :, :, :], weights[:, :, :, :, fm])
                    if padding.upper() == "SAME":
                        out_mul = tf.pad(out_mul, paddings=[[0, 0], [0, 0], h_pad_list, w_pad_list, [0, 0]])
                    with tf.name_scope("Correlation_Op"):
                        corr = tf.nn.conv3d(out_mul, conv_weights,
                                            padding="VALID", strides=strides, name="xcorr3d")
                        # print cd, ":", out_mul, corr
                        assign_ops.append(corr)
                # print tf.identity(assign_ops)
                correlation.append(assign_ops)
        # print tf.identity(correlation)
        # Reorder the stacked python lists (fm, depth, ...) into tensor layout.
        corr_axis_sorted = tf.transpose(correlation, [2, 1, 3, 4, 5, 0, 6], name="xcorr_sorted")
        # Shape is like: (10, 3, 1, 5, 5, 9, 1), it is needed to remove 1 from axis 2 and 6
        return tf.squeeze(corr_axis_sorted, axis=[2, 6], name="xcorr_output")
def _ws3d_transposed(input_tensor, weights, rf=(3, 4, 4), strides=(1, 1, 1, 1, 1),
                     padding="VALID", data_format="NDHWC", name="ws3d"):
    """
    Transposed variant of the WS3D (PyraNet) layer.

    Identical to `_ws3d_same_padding` except that the input (and each depth
    slab) is padded unconditionally -- the `padding.upper() == "SAME"` guards
    are commented out -- which enlarges the output like a transposed conv.

    :param input_tensor: 5-D input tensor; layout given by `data_format`
    :param weights: weight tensor whose last axis indexes output feature maps
    :param rf: a list containing the receptive field sizes [height, width], it is the r_l in the original paper
    :param strides: overlap of receptive fields [1, stride_h, stride_w, 1], it is the o_l in the original paper
    :param padding: forwarded to the output-shape helper only; padding of the
        tensors themselves is always applied here
    :param data_format: "NDHWC" (default) or a channel-first layout
    :param name: TensorFlow name scope for the created ops
    :return: channels-last output tensor after squeezing singleton axes
    """
    with tf.name_scope(name):
        input_shape = list(input_tensor.shape)
        if data_format == "NDHWC":
            n, d, h, w, c = list(map(int, input_shape))
        else:
            # Assumed channel-first (NCDHW) when not NDHWC -- TODO confirm.
            n, c, d, h, w = list(map(int, input_shape))
        out_channels = int(weights.shape[-1])
        # ws3d_layer_output_shape_new comes from the star import at file top.
        output_depth, output_height, output_width = list(map(int,
                                                             ws3d_layer_output_shape_new((d, h, w), rf=rf,
                                                                                         strides=strides,
                                                                                         padding=padding)))
        # print output_depth, output_height, output_width
        d_pad_list = padding_size(output_depth, strides[1], rf[0], d)
        h_pad_list = padding_size(output_height, strides[2], rf[1], h)
        w_pad_list = padding_size(output_width, strides[3], rf[2], w)
        # print d_pad_list, h_pad_list, w_pad_list
        # if padding.upper() == "SAME":
        # Unconditional depth padding (this is what makes it "transposed").
        input_tensor = tf.pad(input_tensor, paddings=[[0, 0], d_pad_list, [0, 0], [0, 0], [0, 0]])
        # pad also weights? it helps to avoid multiple padding in feature maps loop
        correlation = []
        # Bias and conv need to be intern to weighting
        with tf.name_scope("Input_Weighting_Op"):
            # All-ones kernel: conv3d only sums the receptive field; the real
            # weighting is the multiply below.
            conv_weights = tf.constant(1.0, tf.float32, shape=(rf[0], rf[1], rf[2], c, 1),
                                       name="{}/conv_kernel".format(name))
            for fm in range(out_channels):
                assign_ops = []
                for cd in range(output_depth):
                    s = cd * strides[1]
                    out_mul = tf.multiply(input_tensor[:, s:s + rf[0], :, :, :], weights[:, :, :, :, fm])
                    # if padding.upper() == "SAME":
                    # Unconditional height/width padding of each weighted slab.
                    out_mul = tf.pad(out_mul, paddings=[[0, 0], [0, 0], h_pad_list, w_pad_list, [0, 0]])
                    with tf.name_scope("Correlation_Op"):
                        corr = tf.nn.conv3d(out_mul, conv_weights,
                                            padding="VALID", strides=strides, name="xcorr3d")
                        # print cd, ":", out_mul, corr
                        assign_ops.append(corr)
                # print tf.identity(assign_ops)
                correlation.append(assign_ops)
        # print tf.identity(correlation)
        # Reorder the stacked python lists (fm, depth, ...) into tensor layout.
        corr_axis_sorted = tf.transpose(correlation, [2, 1, 3, 4, 5, 0, 6], name="xcorr_sorted")
        # Shape is like: (10, 3, 1, 5, 5, 9, 1), it is needed to remove 1 from axis 2 and 6
        return tf.squeeze(corr_axis_sorted, axis=[2, 6], name="xcorr_output")
| 46.573099
| 118
| 0.548091
| 1,072
| 7,964
| 3.875
| 0.129664
| 0.009629
| 0.008666
| 0.007703
| 0.849543
| 0.849543
| 0.822099
| 0.794174
| 0.779249
| 0.779249
| 0
| 0.033029
| 0.327097
| 7,964
| 170
| 119
| 46.847059
| 0.742116
| 0.225766
| 0
| 0.706522
| 0
| 0
| 0.041983
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.054348
| false
| 0
| 0.021739
| 0
| 0.119565
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
61c336d64d5d68bd849953c8fae93b13bda472d9
| 4,346
|
py
|
Python
|
tests/test_override_environment.py
|
pedroburon/python-envtools
|
c1f00c2f2df3cb01e55a31c3616ebd8f6bd8dda9
|
[
"MIT"
] | 3
|
2017-11-28T14:53:54.000Z
|
2019-11-05T17:50:21.000Z
|
tests/test_override_environment.py
|
pedroburon/python-envtools
|
c1f00c2f2df3cb01e55a31c3616ebd8f6bd8dda9
|
[
"MIT"
] | null | null | null |
tests/test_override_environment.py
|
pedroburon/python-envtools
|
c1f00c2f2df3cb01e55a31c3616ebd8f6bd8dda9
|
[
"MIT"
] | null | null | null |
import os
import unittest
from envtools import override_environment
from . import create_unique_env_name
class TestOverrideEnvironmentContextManager(unittest.TestCase):
    """Behaviour of override_environment when used as a context manager."""

    def test_override_nothing(self):
        """With no overrides, existing variables remain visible unchanged."""
        os.environ['SOMETHING'] = saved = 'anything'
        with override_environment():
            self.assertEqual(saved, os.getenv('SOMETHING'))

    def test_override_var(self):
        """A single override applies inside the block and reverts after it."""
        os.environ['SOMETHING'] = saved = 'anything'
        replacement = 'New Value'
        with override_environment(SOMETHING=replacement):
            self.assertEqual(replacement, os.getenv('SOMETHING'))
        self.assertEqual(saved, os.getenv('SOMETHING'))

    def test_override_vars(self):
        """Several overrides apply together and all revert afterwards."""
        os.environ['FOO'] = saved_foo = 'baz'
        os.environ['BAR'] = saved_bar = 'anything'
        new_foo, new_bar = 'foo', 'bar'
        with override_environment(FOO=new_foo, BAR=new_bar):
            self.assertEqual(new_foo, os.getenv('FOO'))
            self.assertEqual(new_bar, os.getenv('BAR'))
        self.assertEqual(saved_foo, os.getenv('FOO'))
        self.assertEqual(saved_bar, os.getenv('BAR'))

    def test_stringify(self):
        """Non-string override values are exposed via their str() form."""
        os.environ['FOO'] = saved_foo = 'baz'
        os.environ['BAR'] = saved_bar = 'anything'
        new_foo, new_bar = 1, False
        with override_environment(FOO=new_foo, BAR=new_bar):
            self.assertEqual(str(new_foo), os.getenv('FOO'))
            self.assertEqual(str(new_bar), os.getenv('BAR'))
        self.assertEqual(saved_foo, os.getenv('FOO'))
        self.assertEqual(saved_bar, os.getenv('BAR'))

    def test_delete(self):
        """Overriding with None removes the variable, then restores it."""
        name = create_unique_env_name()
        os.environ[name] = 'foo'
        with override_environment(**{name: None}):
            self.assertNotIn(name, os.environ)
        self.assertEqual('foo', os.environ[name])

    def test_delete_non_existent(self):
        """Deleting a variable that never existed is a harmless no-op."""
        name = create_unique_env_name()
        with override_environment(**{name: None}):
            self.assertNotIn(name, os.environ)
        self.assertNotIn(name, os.environ)
class TestOverrideEnvironmentDecorator(unittest.TestCase):
    """Behaviour of override_environment when used as a function decorator."""

    def test_override_nothing(self):
        """Decorating with no overrides leaves existing variables alone."""
        os.environ['SOMETHING'] = saved = 'anything'

        @override_environment()
        def probe():
            self.assertEqual(saved, os.getenv('SOMETHING'))

        probe()

    def test_override_var(self):
        """A single override holds inside the call and reverts after it."""
        os.environ['SOMETHING'] = saved = 'anything'
        replacement = 'New Value'

        @override_environment(SOMETHING=replacement)
        def probe():
            self.assertEqual(replacement, os.getenv('SOMETHING'))

        probe()
        self.assertEqual(saved, os.getenv('SOMETHING'))

    def test_override_vars(self):
        """Several overrides hold together and all revert afterwards."""
        os.environ['FOO'] = saved_foo = 'baz'
        os.environ['BAR'] = saved_bar = 'anything'
        new_foo, new_bar = 'foo', 'bar'

        @override_environment(FOO=new_foo, BAR=new_bar)
        def probe():
            self.assertEqual(new_foo, os.getenv('FOO'))
            self.assertEqual(new_bar, os.getenv('BAR'))

        probe()
        self.assertEqual(saved_foo, os.getenv('FOO'))
        self.assertEqual(saved_bar, os.getenv('BAR'))

    def test_stringify(self):
        """Non-string override values are exposed via their str() form."""
        os.environ['FOO'] = saved_foo = 'baz'
        os.environ['BAR'] = saved_bar = 'anything'
        new_foo, new_bar = 1, False

        @override_environment(FOO=new_foo, BAR=new_bar)
        def probe():
            self.assertEqual(str(new_foo), os.getenv('FOO'))
            self.assertEqual(str(new_bar), os.getenv('BAR'))

        probe()
        self.assertEqual(saved_foo, os.getenv('FOO'))
        self.assertEqual(saved_bar, os.getenv('BAR'))

    def test_delete(self):
        """Overriding with None removes the variable, then restores it."""
        name = create_unique_env_name()
        os.environ[name] = 'foo'

        @override_environment(**{name: None})
        def probe():
            self.assertNotIn(name, os.environ)

        probe()
        self.assertEqual('foo', os.environ[name])

    def test_delete_non_existent(self):
        """Deleting a variable that never existed is a harmless no-op."""
        name = create_unique_env_name()

        @override_environment(**{name: None})
        def probe():
            self.assertNotIn(name, os.environ)

        probe()
        self.assertNotIn(name, os.environ)
| 34.220472
| 70
| 0.631615
| 485
| 4,346
| 5.474227
| 0.08866
| 0.135593
| 0.140866
| 0.190584
| 0.897552
| 0.888889
| 0.877213
| 0.821846
| 0.821846
| 0.80678
| 0
| 0.000606
| 0.241141
| 4,346
| 126
| 71
| 34.492063
| 0.804427
| 0
| 0
| 0.9
| 0
| 0
| 0.064442
| 0
| 0
| 0
| 0
| 0
| 0.3
| 1
| 0.18
| false
| 0
| 0.04
| 0
| 0.24
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f607de47d5f4a9187f87d7676cb49d779bbd3b55
| 88
|
py
|
Python
|
tests/test_template.py
|
TR33HGR/python-template
|
90f76b11ab197e1f65289eebfec9e63a64ea165a
|
[
"MIT"
] | null | null | null |
tests/test_template.py
|
TR33HGR/python-template
|
90f76b11ab197e1f65289eebfec9e63a64ea165a
|
[
"MIT"
] | null | null | null |
tests/test_template.py
|
TR33HGR/python-template
|
90f76b11ab197e1f65289eebfec9e63a64ea165a
|
[
"MIT"
] | null | null | null |
from python_template.template import is_true
def test():
    """is_true() from the template package must return the True singleton."""
    outcome = is_true()
    assert outcome is True
| 14.666667
| 44
| 0.75
| 14
| 88
| 4.5
| 0.642857
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 88
| 5
| 45
| 17.6
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f6364ccd282a05198c52102ae593c3a73133e6ba
| 7,577
|
py
|
Python
|
usercodex/plugins/ping.py
|
ipindanger/Codex-z
|
1cedebd4352e4adb914b40a219bbda752c9b39d7
|
[
"BSD-3-Clause"
] | null | null | null |
usercodex/plugins/ping.py
|
ipindanger/Codex-z
|
1cedebd4352e4adb914b40a219bbda752c9b39d7
|
[
"BSD-3-Clause"
] | null | null | null |
usercodex/plugins/ping.py
|
ipindanger/Codex-z
|
1cedebd4352e4adb914b40a219bbda752c9b39d7
|
[
"BSD-3-Clause"
] | null | null | null |
import asyncio
from datetime import datetime
from ..core.managers import edit_or_reply
from . import codex, hmention
# Category label used by the command registration below to group these
# commands in the bot's help listing.
plugin_category = "tools"
@codex.cod_cmd(
    pattern="ping( -a|$)",
    command=("ping", plugin_category),
    info={
        "header": "check how long it takes to ping your userbot",
        "flags": {"-a": "average ping"},
        "usage": ["{tr}ping", "{tr}ping -a"],
    },
)
async def _(event):
    """Reply with the round-trip edit latency; `-a` averages over three edits."""
    flag = event.pattern_match.group(1)
    start = datetime.now()
    if flag == " -a":
        # Average mode: three timed message edits, spaced by 0.3 s sleeps.
        codevent = await edit_or_reply(event, "`!....`")
        await asyncio.sleep(0.3)
        await codevent.edit("`..!..`")
        await asyncio.sleep(0.3)
        await codevent.edit("`....!`")
        end = datetime.now()
        tms = (end - start).microseconds / 1000
        # NOTE(review): `.microseconds` ignores whole seconds of the delta, and
        # subtracting 0.6 looks intended to remove the 0.6 s of sleeps but is
        # off by a factor of 1000 in ms units -- verify before relying on this.
        ms = round((tms - 0.6) / 3, 3)
        await codevent.edit(f"**☞ Average Pong!**\n➥ {ms} ms")
    else:
        # Single-shot mode: time one edit round trip.
        codevent = await edit_or_reply(event, "<b><i>☞ Pong!</b></i>", "html")
        end = datetime.now()
        ms = (end - start).microseconds / 1000
        await codevent.edit(
            f"<b><i>☞ Pong</b></i>\n➥ {ms} <b><i>ms\n➥ Bot of {hmention}</b></i>",
            parse_mode="html",
        )
@codex.cod_cmd(
    pattern="fping$",
    command=("fping", plugin_category),
    info={"header": "Shows the server ping with extra animation", "usage": "{tr}fping"},
)
async def _(event):
    """Show the edit latency after playing a 26-frame emoji animation."""
    # Timing starts before the animation, so the ~7.8 s of sleeps below are
    # part of the measured interval (see the NOTE on `ms` at the bottom).
    start = datetime.now()
    animation_interval = 0.3
    animation_ttl = range(26)
    event = await edit_or_reply(event, "ping....")
    # Progressive frames spelling "PING" in block/antenna emoji; each frame
    # extends the previous one by a row.  Do not reformat: the exact strings
    # (including trailing spaces) are what gets sent to the chat.
    animation_chars = [
        "⬛⬛⬛⬛⬛⬛⬛⬛⬛",
        "⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛",
        "⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛",
        "⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ ",
        "⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ ",
        "⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛⬛📶📶⬛⬛",
        "⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛⬛📶📶⬛⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛",
        "⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛⬛📶📶⬛⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛",
        "⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛⬛📶📶⬛⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛📶📶📶📶📶📶📶⬛",
        "⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛⬛📶📶⬛⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛",
        "⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛⬛📶📶⬛⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛",
        "⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛⬛📶📶⬛⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛",
        "⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛⬛📶📶⬛⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛⬛⬛📶⬛⬛",
        "⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛⬛📶📶⬛⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛⬛⬛📶⬛⬛ \n⬛⬛⬛⬛⬛📶⬛⬛⬛",
        "⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛⬛📶📶⬛⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛⬛⬛📶⬛⬛ \n⬛⬛⬛⬛⬛📶⬛⬛⬛ \n⬛⬛⬛⬛📶⬛⬛⬛⬛",
        "⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛⬛📶📶⬛⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛⬛⬛📶⬛⬛ \n⬛⬛⬛⬛⬛📶⬛⬛⬛ \n⬛⬛⬛⬛📶⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛",
        "⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛⬛📶📶⬛⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛⬛⬛📶⬛⬛ \n⬛⬛⬛⬛⬛📶⬛⬛⬛ \n⬛⬛⬛⬛📶⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛",
        "⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛⬛📶📶⬛⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛⬛⬛📶⬛⬛ \n⬛⬛⬛⬛⬛📶⬛⬛⬛ \n⬛⬛⬛⬛📶⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛⬛📶📶📶📶📶⬛⬛",
        "⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛⬛📶📶⬛⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛⬛⬛📶⬛⬛ \n⬛⬛⬛⬛⬛📶⬛⬛⬛ \n⬛⬛⬛⬛📶⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛⬛📶📶📶📶📶⬛⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛",
        "⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛⬛📶📶⬛⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛⬛⬛📶⬛⬛ \n⬛⬛⬛⬛⬛📶⬛⬛⬛ \n⬛⬛⬛⬛📶⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛⬛📶📶📶📶📶⬛⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛",
        "⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛⬛📶📶⬛⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛⬛⬛📶⬛⬛ \n⬛⬛⬛⬛⬛📶⬛⬛⬛ \n⬛⬛⬛⬛📶⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛⬛📶📶📶📶📶⬛⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛📶⬛📶⬛⬛⬛📶⬛",
        "⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛⬛📶📶⬛⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛⬛⬛📶⬛⬛ \n⬛⬛⬛⬛⬛📶⬛⬛⬛ \n⬛⬛⬛⬛📶⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛⬛📶📶📶📶📶⬛⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛📶⬛📶⬛⬛⬛📶⬛ \n⬛⬛📶📶⬛⬛📶⬛⬛",
        "⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛⬛📶📶⬛⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛⬛⬛📶⬛⬛ \n⬛⬛⬛⬛⬛📶⬛⬛⬛ \n⬛⬛⬛⬛📶⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛⬛📶📶📶📶📶⬛⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛📶⬛📶⬛⬛⬛📶⬛ \n⬛⬛📶📶⬛⬛📶⬛⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛",
        "⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛⬛📶📶⬛⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛⬛⬛📶⬛⬛ \n⬛⬛⬛⬛⬛📶⬛⬛⬛ \n⬛⬛⬛⬛📶⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛⬛📶📶📶📶📶⬛⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛📶⬛📶⬛⬛⬛📶⬛ \n⬛⬛📶📶⬛⬛📶⬛⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶⬛📶📶📶📶📶⬛",
        "⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛⬛📶📶⬛⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛⬛⬛📶⬛⬛ \n⬛⬛⬛⬛⬛📶⬛⬛⬛ \n⬛⬛⬛⬛📶⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛⬛📶📶📶📶📶⬛⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛📶⬛📶⬛⬛⬛📶⬛ \n⬛⬛📶📶⬛⬛📶⬛⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶⬛📶📶📶📶📶⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛",
        "⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛📶⬛⬛📶⬛ \n⬛⬛⬛⬛⬛📶📶⬛⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛⬛⬛📶⬛⬛ \n⬛⬛⬛⬛⬛📶⬛⬛⬛ \n⬛⬛⬛⬛📶⬛⬛⬛⬛ \n⬛📶📶📶📶📶📶📶⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n⬛⬛📶📶📶📶📶⬛⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛📶⬛⬛⬛⬛⬛📶⬛ \n⬛📶⬛📶⬛⬛⬛📶⬛ \n⬛⬛📶📶⬛⬛📶⬛⬛ \n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n \n My 🇵 🇮 🇳 🇬 Is : Calculating...",
    ]
    for i in animation_ttl:
        await asyncio.sleep(animation_interval)
        await event.edit(animation_chars[i % 26])
    end = datetime.now()
    # NOTE(review): `.microseconds` discards whole seconds of the delta; with
    # ~7.8 s of animation sleeps included, the reported value is essentially
    # the sub-second remainder, not the real latency -- verify intent.
    ms = (end - start).microseconds / 1000
    await event.edit(
        f"⬛⬛⬛⬛⬛⬛⬛⬛⬛\n⬛📶📶📶📶📶📶📶⬛\n⬛⬛⬛⬛📶⬛⬛📶⬛\n⬛⬛⬛⬛📶⬛⬛📶⬛\n⬛⬛⬛⬛📶⬛⬛📶⬛\n⬛⬛⬛⬛⬛📶📶⬛⬛\n⬛⬛⬛⬛⬛⬛⬛⬛⬛\n⬛⬛📶📶📶📶📶⬛⬛\n⬛📶⬛⬛⬛⬛⬛📶⬛\n⬛📶⬛⬛⬛⬛⬛📶⬛\n⬛📶⬛⬛⬛⬛⬛📶⬛\n⬛⬛📶📶📶📶📶⬛⬛\n⬛⬛⬛⬛⬛⬛⬛⬛⬛\n⬛📶📶📶📶📶📶📶⬛\n⬛⬛⬛⬛⬛⬛📶⬛⬛\n⬛⬛⬛⬛⬛📶⬛⬛⬛\n⬛⬛⬛⬛📶⬛⬛⬛⬛\n⬛📶📶📶📶📶📶📶⬛\n⬛⬛⬛⬛⬛⬛⬛⬛⬛\n⬛⬛📶📶📶📶📶⬛⬛\n⬛📶⬛⬛⬛⬛⬛📶⬛\n⬛📶⬛⬛⬛⬛⬛📶⬛\n⬛📶⬛📶⬛⬛⬛📶⬛\n⬛⬛📶📶⬛⬛📶⬛⬛\n⬛⬛⬛⬛⬛⬛⬛⬛⬛\n⬛📶⬛📶📶📶📶📶⬛\n⬛⬛⬛⬛⬛⬛⬛⬛⬛ \n \n \n My 🇵 🇮 🇳 🇬 Is : {ms} ms"
    )
| 84.188889
| 410
| 0.189389
| 991
| 7,577
| 5.789102
| 0.116044
| 0.134216
| 0.149556
| 0.188252
| 0.839289
| 0.828482
| 0.806868
| 0.806868
| 0.781767
| 0.745163
| 0
| 0.004144
| 0.140161
| 7,577
| 89
| 411
| 85.134831
| 0.212586
| 0
| 0
| 0.156627
| 0
| 0.253012
| 0.764551
| 0.040121
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.048193
| 0
| 0.048193
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
9c8a70a92c6ce0d6f45be4060fbbfc316ca03dd1
| 118
|
py
|
Python
|
app/modules/home/forms.py
|
willypuzzle/flask-foundation
|
3d4a09470d8303905969f19a990baa0945fdf1d0
|
[
"BSD-2-Clause"
] | null | null | null |
app/modules/home/forms.py
|
willypuzzle/flask-foundation
|
3d4a09470d8303905969f19a990baa0945fdf1d0
|
[
"BSD-2-Clause"
] | null | null | null |
app/modules/home/forms.py
|
willypuzzle/flask-foundation
|
3d4a09470d8303905969f19a990baa0945fdf1d0
|
[
"BSD-2-Clause"
] | null | null | null |
from flask_wtf import FlaskForm as Form
from wtforms import StringField, PasswordField
from wtforms import validators
| 29.5
| 46
| 0.864407
| 16
| 118
| 6.3125
| 0.6875
| 0.217822
| 0.336634
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.127119
| 118
| 3
| 47
| 39.333333
| 0.980583
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
140d9fb3d831ebc6dbbcc3dbe2c93a97a784dd2c
| 26,621
|
py
|
Python
|
tests/components/zwave_js/test_fan.py
|
MrDelik/core
|
93a66cc357b226389967668441000498a10453bb
|
[
"Apache-2.0"
] | 30,023
|
2016-04-13T10:17:53.000Z
|
2020-03-02T12:56:31.000Z
|
tests/components/zwave_js/test_fan.py
|
MrDelik/core
|
93a66cc357b226389967668441000498a10453bb
|
[
"Apache-2.0"
] | 24,710
|
2016-04-13T08:27:26.000Z
|
2020-03-02T12:59:13.000Z
|
tests/components/zwave_js/test_fan.py
|
MrDelik/core
|
93a66cc357b226389967668441000498a10453bb
|
[
"Apache-2.0"
] | 11,956
|
2016-04-13T18:42:31.000Z
|
2020-03-02T09:32:12.000Z
|
"""Test the Z-Wave JS fan platform."""
import copy
import math
import pytest
from voluptuous.error import MultipleInvalid
from zwave_js_server.const import CommandClass
from zwave_js_server.event import Event
from zwave_js_server.model.node import Node
from homeassistant.components.fan import (
ATTR_PERCENTAGE,
ATTR_PERCENTAGE_STEP,
ATTR_PRESET_MODE,
ATTR_PRESET_MODES,
DOMAIN as FAN_DOMAIN,
SERVICE_SET_PRESET_MODE,
SUPPORT_PRESET_MODE,
NotValidPresetModeError,
)
from homeassistant.components.zwave_js.fan import ATTR_FAN_STATE
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_SUPPORTED_FEATURES,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_OFF,
STATE_ON,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
)
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import entity_registry
async def test_generic_fan(hass, client, fan_generic, integration):
    """Test the fan entity for a generic fan that lacks specific speed configuration.

    Exercises turn_on with/without a percentage, invalid percentage input,
    turn_off, and state updates driven by incoming "value updated" events.
    """
    node = fan_generic
    entity_id = "fan.generic_fan_controller"
    state = hass.states.get(entity_id)
    assert state
    assert state.state == "off"
    # Test turn on setting speed
    await hass.services.async_call(
        "fan",
        "turn_on",
        {"entity_id": entity_id, "percentage": 66},
        blocking=True,
    )
    # Without speed configuration, the percentage maps straight onto the
    # Multilevel Switch target value (66% -> 66).
    assert len(client.async_send_command.call_args_list) == 1
    args = client.async_send_command.call_args[0][0]
    assert args["command"] == "node.set_value"
    assert args["nodeId"] == 17
    assert args["valueId"] == {
        "commandClassName": "Multilevel Switch",
        "commandClass": 38,
        "endpoint": 0,
        "property": "targetValue",
        "propertyName": "targetValue",
        "metadata": {
            "label": "Target value",
            "max": 99,
            "min": 0,
            "type": "number",
            "readable": True,
            "writeable": True,
        },
    }
    assert args["value"] == 66
    client.async_send_command.reset_mock()
    # Test setting unknown speed
    with pytest.raises(MultipleInvalid):
        await hass.services.async_call(
            "fan",
            "set_percentage",
            {"entity_id": entity_id, "percentage": "bad"},
            blocking=True,
        )
    client.async_send_command.reset_mock()
    # Test turn on no speed
    await hass.services.async_call(
        "fan",
        "turn_on",
        {"entity_id": entity_id},
        blocking=True,
    )
    # 255 is the Z-Wave "restore previous level" magic value.
    assert len(client.async_send_command.call_args_list) == 1
    args = client.async_send_command.call_args[0][0]
    assert args["command"] == "node.set_value"
    assert args["nodeId"] == 17
    assert args["valueId"] == {
        "commandClassName": "Multilevel Switch",
        "commandClass": 38,
        "endpoint": 0,
        "property": "targetValue",
        "propertyName": "targetValue",
        "metadata": {
            "label": "Target value",
            "max": 99,
            "min": 0,
            "type": "number",
            "readable": True,
            "writeable": True,
        },
    }
    assert args["value"] == 255
    client.async_send_command.reset_mock()
    # Test turning off
    await hass.services.async_call(
        "fan",
        "turn_off",
        {"entity_id": entity_id},
        blocking=True,
    )
    assert len(client.async_send_command.call_args_list) == 1
    args = client.async_send_command.call_args[0][0]
    assert args["command"] == "node.set_value"
    assert args["nodeId"] == 17
    assert args["valueId"] == {
        "commandClassName": "Multilevel Switch",
        "commandClass": 38,
        "endpoint": 0,
        "property": "targetValue",
        "propertyName": "targetValue",
        "metadata": {
            "label": "Target value",
            "max": 99,
            "min": 0,
            "type": "number",
            "readable": True,
            "writeable": True,
        },
    }
    assert args["value"] == 0
    client.async_send_command.reset_mock()
    # Test speed update from value updated event
    event = Event(
        type="value updated",
        data={
            "source": "node",
            "event": "value updated",
            "nodeId": 17,
            "args": {
                "commandClassName": "Multilevel Switch",
                "commandClass": 38,
                "endpoint": 0,
                "property": "currentValue",
                "newValue": 99,
                "prevValue": 0,
                "propertyName": "currentValue",
            },
        },
    )
    node.receive_event(event)
    state = hass.states.get(entity_id)
    # Device level 99 (the Multilevel Switch max) reads back as 100%.
    assert state.state == "on"
    assert state.attributes[ATTR_PERCENTAGE] == 100
    client.async_send_command.reset_mock()
    event = Event(
        type="value updated",
        data={
            "source": "node",
            "event": "value updated",
            "nodeId": 17,
            "args": {
                "commandClassName": "Multilevel Switch",
                "commandClass": 38,
                "endpoint": 0,
                "property": "currentValue",
                "newValue": 0,
                "prevValue": 0,
                "propertyName": "currentValue",
            },
        },
    )
    node.receive_event(event)
    state = hass.states.get(entity_id)
    assert state.state == "off"
    assert state.attributes[ATTR_PERCENTAGE] == 0
async def test_configurable_speeds_fan(hass, client, hs_fc200, integration):
    """Test a fan entity with configurable speeds.

    Round-trips every percentage through the device speed mapping and every
    device speed back through the percentage mapping, then checks the step
    size and that no preset modes are exposed.
    """
    node = hs_fc200
    node_id = 39
    entity_id = "fan.scene_capable_fan_control_switch"
    async def get_zwave_speed_from_percentage(percentage):
        """Set the fan to a particular percentage and get the resulting Zwave speed."""
        client.async_send_command.reset_mock()
        await hass.services.async_call(
            "fan",
            "turn_on",
            {"entity_id": entity_id, "percentage": percentage},
            blocking=True,
        )
        assert len(client.async_send_command.call_args_list) == 1
        args = client.async_send_command.call_args[0][0]
        assert args["command"] == "node.set_value"
        assert args["nodeId"] == node_id
        return args["value"]
    async def get_percentage_from_zwave_speed(zwave_speed):
        """Set the underlying device speed and get the resulting percentage."""
        event = Event(
            type="value updated",
            data={
                "source": "node",
                "event": "value updated",
                "nodeId": node_id,
                "args": {
                    "commandClassName": "Multilevel Switch",
                    "commandClass": 38,
                    "endpoint": 0,
                    "property": "currentValue",
                    "newValue": zwave_speed,
                    "prevValue": 0,
                    "propertyName": "currentValue",
                },
            },
        )
        node.receive_event(event)
        state = hass.states.get(entity_id)
        return state.attributes[ATTR_PERCENTAGE]
    # In 3-speed mode, the speeds are:
    # low = 1-33, med=34-66, high=67-99
    # Each entry pairs a percentage range with its matching device-speed range.
    percentages_to_zwave_speeds = [
        [[0], [0]],
        [range(1, 34), range(1, 34)],
        [range(34, 68), range(34, 67)],
        [range(68, 101), range(67, 100)],
    ]
    for percentages, zwave_speeds in percentages_to_zwave_speeds:
        for percentage in percentages:
            actual_zwave_speed = await get_zwave_speed_from_percentage(percentage)
            assert actual_zwave_speed in zwave_speeds
        for zwave_speed in zwave_speeds:
            actual_percentage = await get_percentage_from_zwave_speed(zwave_speed)
            assert actual_percentage in percentages
    # Three speeds -> one-third percentage steps; no preset modes.
    state = hass.states.get(entity_id)
    assert math.isclose(state.attributes[ATTR_PERCENTAGE_STEP], 33.3333, rel_tol=1e-3)
    assert state.attributes[ATTR_PRESET_MODES] == []
async def test_configurable_speeds_fan_with_missing_config_value(
    hass, client, hs_fc200_state, integration
):
    """Test that a missing fan-type config value makes the entity unavailable."""
    entity_id = "fan.scene_capable_fan_control_switch"
    # Work on a deep copy so the shared fixture state stays pristine.
    broken_state = copy.deepcopy(hs_fc200_state)
    # Locate the fan-type configuration value (CC 112, property 5, endpoint 0).
    fan_type_value = None
    for value in broken_state["values"]:
        if (
            value["endpoint"] == 0
            and value["commandClass"] == 112
            and value["property"] == 5
        ):
            fan_type_value = value
            break
    assert fan_type_value is not None
    # Drop the config value entirely, then attach the broken node.
    broken_state["values"].remove(fan_type_value)
    node = Node(client, broken_state)
    event = {"node": node}
    client.driver.controller.emit("node added", event)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == STATE_UNAVAILABLE
async def test_configurable_speeds_fan_with_bad_config_value(
    hass, client, hs_fc200_state, integration
):
    """Test that an invalid fan-type config value makes the entity unavailable."""
    entity_id = "fan.scene_capable_fan_control_switch"
    # Work on a deep copy so the shared fixture state stays pristine.
    broken_state = copy.deepcopy(hs_fc200_state)
    # Locate the fan-type configuration value (CC 112, property 5, endpoint 0).
    fan_type_value = None
    for value in broken_state["values"]:
        if (
            value["endpoint"] == 0
            and value["commandClass"] == 112
            and value["property"] == 5
        ):
            fan_type_value = value
            break
    assert fan_type_value is not None
    # 42 is not a valid configuration option with this device
    fan_type_value["value"] = 42
    node = Node(client, broken_state)
    event = {"node": node}
    client.driver.controller.emit("node added", event)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == STATE_UNAVAILABLE
async def test_fixed_speeds_fan(hass, client, ge_12730, integration):
    """Test a fan entity with fixed speeds.

    Same round-trip structure as the configurable-speed test, but against a
    GE 12730 whose three speed bands are fixed in the integration.
    """
    node = ge_12730
    node_id = 24
    entity_id = "fan.in_wall_smart_fan_control"
    async def get_zwave_speed_from_percentage(percentage):
        """Set the fan to a particular percentage and get the resulting Zwave speed."""
        client.async_send_command.reset_mock()
        await hass.services.async_call(
            "fan",
            "turn_on",
            {"entity_id": entity_id, "percentage": percentage},
            blocking=True,
        )
        assert len(client.async_send_command.call_args_list) == 1
        args = client.async_send_command.call_args[0][0]
        assert args["command"] == "node.set_value"
        assert args["nodeId"] == node_id
        return args["value"]
    async def get_percentage_from_zwave_speed(zwave_speed):
        """Set the underlying device speed and get the resulting percentage."""
        event = Event(
            type="value updated",
            data={
                "source": "node",
                "event": "value updated",
                "nodeId": node_id,
                "args": {
                    "commandClassName": "Multilevel Switch",
                    "commandClass": 38,
                    "endpoint": 0,
                    "property": "currentValue",
                    "newValue": zwave_speed,
                    "prevValue": 0,
                    "propertyName": "currentValue",
                },
            },
        )
        node.receive_event(event)
        state = hass.states.get(entity_id)
        return state.attributes[ATTR_PERCENTAGE]
    # This device has the speeds:
    # low = 1-33, med = 34-67, high = 68-99
    # Each entry pairs a percentage range with its matching device-speed range.
    percentages_to_zwave_speeds = [
        [[0], [0]],
        [range(1, 34), range(1, 34)],
        [range(34, 68), range(34, 68)],
        [range(68, 101), range(68, 100)],
    ]
    for percentages, zwave_speeds in percentages_to_zwave_speeds:
        for percentage in percentages:
            actual_zwave_speed = await get_zwave_speed_from_percentage(percentage)
            assert actual_zwave_speed in zwave_speeds
        for zwave_speed in zwave_speeds:
            actual_percentage = await get_percentage_from_zwave_speed(zwave_speed)
            assert actual_percentage in percentages
    # Three speeds -> one-third percentage steps; no preset modes.
    state = hass.states.get(entity_id)
    assert math.isclose(state.attributes[ATTR_PERCENTAGE_STEP], 33.3333, rel_tol=1e-3)
    assert state.attributes[ATTR_PRESET_MODES] == []
async def test_inovelli_lzw36(hass, client, inovelli_lzw36, integration):
    """Test an LZW36.

    Covers the percentage <-> device-speed round trip on endpoint 2, the
    "breeze" preset (device level 1), and rejection of unknown presets.
    """
    node = inovelli_lzw36
    node_id = 19
    entity_id = "fan.family_room_combo_2"
    async def get_zwave_speed_from_percentage(percentage):
        """Set the fan to a particular percentage and get the resulting Zwave speed."""
        client.async_send_command.reset_mock()
        await hass.services.async_call(
            "fan",
            "turn_on",
            {"entity_id": entity_id, "percentage": percentage},
            blocking=True,
        )
        assert len(client.async_send_command.call_args_list) == 1
        args = client.async_send_command.call_args[0][0]
        assert args["command"] == "node.set_value"
        assert args["nodeId"] == node_id
        return args["value"]
    async def set_zwave_speed(zwave_speed):
        """Set the underlying device speed."""
        event = Event(
            type="value updated",
            data={
                "source": "node",
                "event": "value updated",
                "nodeId": node_id,
                "args": {
                    "commandClassName": "Multilevel Switch",
                    "commandClass": 38,
                    "endpoint": 2,
                    "property": "currentValue",
                    "newValue": zwave_speed,
                    "prevValue": 0,
                    "propertyName": "currentValue",
                },
            },
        )
        node.receive_event(event)
    async def get_percentage_from_zwave_speed(zwave_speed):
        """Set the underlying device speed and get the resulting percentage."""
        await set_zwave_speed(zwave_speed)
        state = hass.states.get(entity_id)
        return state.attributes[ATTR_PERCENTAGE]
    # This device has the speeds:
    # low = 2-33, med = 34-66, high = 67-99
    # (device level 1 is reserved for the "breeze" preset, tested below)
    percentages_to_zwave_speeds = [
        [[0], [0]],
        [range(1, 34), range(2, 34)],
        [range(34, 68), range(34, 67)],
        [range(68, 101), range(67, 100)],
    ]
    for percentages, zwave_speeds in percentages_to_zwave_speeds:
        for percentage in percentages:
            actual_zwave_speed = await get_zwave_speed_from_percentage(percentage)
            assert actual_zwave_speed in zwave_speeds
        for zwave_speed in zwave_speeds:
            actual_percentage = await get_percentage_from_zwave_speed(zwave_speed)
            assert actual_percentage in percentages
    # Check static entity properties
    state = hass.states.get(entity_id)
    assert math.isclose(state.attributes[ATTR_PERCENTAGE_STEP], 33.3333, rel_tol=1e-3)
    assert state.attributes[ATTR_PRESET_MODES] == ["breeze"]
    # This device has one preset, where a device level of "1" is the
    # "breeze" mode
    await set_zwave_speed(1)
    state = hass.states.get(entity_id)
    # While in a preset, no percentage is reported.
    assert state.attributes[ATTR_PRESET_MODE] == "breeze"
    assert state.attributes[ATTR_PERCENTAGE] is None
    client.async_send_command.reset_mock()
    await hass.services.async_call(
        "fan",
        "turn_on",
        {"entity_id": entity_id, "preset_mode": "breeze"},
        blocking=True,
    )
    assert len(client.async_send_command.call_args_list) == 1
    args = client.async_send_command.call_args[0][0]
    assert args["command"] == "node.set_value"
    assert args["nodeId"] == node_id
    assert args["value"] == 1
    client.async_send_command.reset_mock()
    # An unknown preset must raise and send nothing to the device.
    with pytest.raises(NotValidPresetModeError):
        await hass.services.async_call(
            "fan",
            "turn_on",
            {"entity_id": entity_id, "preset_mode": "wheeze"},
            blocking=True,
        )
    assert len(client.async_send_command.call_args_list) == 0
async def test_thermostat_fan(hass, client, climate_adc_t3000, integration):
    """Test the fan entity for a z-wave fan.

    Covers: enabling the integration-disabled entity, setting preset modes,
    turn on/off via the "off" property, and state updates driven by
    Thermostat Fan State / Thermostat Fan Mode value-updated events.
    """
    node = climate_adc_t3000
    entity_id = "fan.adc_t3000"
    registry = entity_registry.async_get(hass)

    # The entity is created disabled by the integration, so it has no state.
    state = hass.states.get(entity_id)
    assert state is None
    entry = registry.async_get(entity_id)
    assert entry
    assert entry.disabled
    assert entry.disabled_by is entity_registry.RegistryEntryDisabler.INTEGRATION

    # Test enabling entity
    updated_entry = registry.async_update_entity(entity_id, disabled_by=None)
    assert updated_entry != entry
    assert updated_entry.disabled is False
    # Reload the config entry so the newly enabled entity is set up.
    await hass.config_entries.async_reload(integration.entry_id)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state
    assert state.state == STATE_ON
    assert state.attributes.get(ATTR_FAN_STATE) == "Idle / off"
    assert state.attributes.get(ATTR_PRESET_MODE) == "Auto low"
    assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == SUPPORT_PRESET_MODE

    # Test setting preset mode
    await hass.services.async_call(
        FAN_DOMAIN,
        SERVICE_SET_PRESET_MODE,
        {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: "Low"},
        blocking=True,
    )
    assert len(client.async_send_command.call_args_list) == 1
    args = client.async_send_command.call_args[0][0]
    assert args["command"] == "node.set_value"
    assert args["nodeId"] == 68
    # The "mode" value of the Thermostat Fan Mode CC is written.
    assert args["valueId"] == {
        "ccVersion": 3,
        "commandClassName": "Thermostat Fan Mode",
        "commandClass": CommandClass.THERMOSTAT_FAN_MODE.value,
        "endpoint": 0,
        "property": "mode",
        "propertyName": "mode",
        "metadata": {
            "label": "Thermostat fan mode",
            "max": 255,
            "min": 0,
            "type": "number",
            "readable": True,
            "writeable": True,
            "states": {"0": "Auto low", "1": "Low", "6": "Circulation"},
        },
        "value": 0,
    }
    # "Low" maps to state key "1".
    assert args["value"] == 1
    client.async_send_command.reset_mock()

    # Test setting unknown preset mode
    with pytest.raises(ValueError):
        await hass.services.async_call(
            FAN_DOMAIN,
            SERVICE_SET_PRESET_MODE,
            {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: "Turbo"},
            blocking=True,
        )
    client.async_send_command.reset_mock()

    # Test turning off
    await hass.services.async_call(
        FAN_DOMAIN,
        SERVICE_TURN_OFF,
        {ATTR_ENTITY_ID: entity_id},
        blocking=True,
    )
    assert len(client.async_send_command.call_args_list) == 1
    args = client.async_send_command.call_args[0][0]
    assert args["command"] == "node.set_value"
    assert args["nodeId"] == 68
    # Turning off writes the boolean "off" property of the same CC.
    assert args["valueId"] == {
        "ccVersion": 3,
        "commandClassName": "Thermostat Fan Mode",
        "commandClass": CommandClass.THERMOSTAT_FAN_MODE.value,
        "endpoint": 0,
        "property": "off",
        "propertyName": "off",
        "metadata": {
            "label": "Thermostat fan turned off",
            "type": "boolean",
            "readable": True,
            "writeable": True,
        },
        "value": False,
    }
    # Turn off -> "off" set to True.
    assert args["value"]
    client.async_send_command.reset_mock()

    # Test turning on
    await hass.services.async_call(
        FAN_DOMAIN,
        SERVICE_TURN_ON,
        {ATTR_ENTITY_ID: entity_id},
        blocking=True,
    )
    assert len(client.async_send_command.call_args_list) == 1
    args = client.async_send_command.call_args[0][0]
    assert args["command"] == "node.set_value"
    assert args["nodeId"] == 68
    assert args["valueId"] == {
        "ccVersion": 3,
        "commandClassName": "Thermostat Fan Mode",
        "commandClass": CommandClass.THERMOSTAT_FAN_MODE.value,
        "endpoint": 0,
        "property": "off",
        "propertyName": "off",
        "metadata": {
            "label": "Thermostat fan turned off",
            "type": "boolean",
            "readable": True,
            "writeable": True,
        },
        "value": False,
    }
    # Turn on -> "off" set to False.
    assert not args["value"]
    client.async_send_command.reset_mock()

    # Test fan state update from value updated event
    event = Event(
        type="value updated",
        data={
            "source": "node",
            "event": "value updated",
            "nodeId": 68,
            "args": {
                "commandClassName": "Thermostat Fan State",
                "commandClass": CommandClass.THERMOSTAT_FAN_STATE.value,
                "endpoint": 0,
                "property": "state",
                "newValue": 4,
                "prevValue": 0,
                "propertyName": "state",
            },
        },
    )
    node.receive_event(event)
    state = hass.states.get(entity_id)
    # Fan state value 4 is labeled "Circulation mode" on this device.
    assert state.attributes.get(ATTR_FAN_STATE) == "Circulation mode"
    client.async_send_command.reset_mock()

    # Test unknown fan state update from value updated event
    event = Event(
        type="value updated",
        data={
            "source": "node",
            "event": "value updated",
            "nodeId": 68,
            "args": {
                "commandClassName": "Thermostat Fan State",
                "commandClass": CommandClass.THERMOSTAT_FAN_STATE.value,
                "endpoint": 0,
                "property": "state",
                "newValue": 99,
                "prevValue": 0,
                "propertyName": "state",
            },
        },
    )
    node.receive_event(event)
    state = hass.states.get(entity_id)
    # An unmapped state value must clear the fan-state attribute.
    assert not state.attributes.get(ATTR_FAN_STATE)
    client.async_send_command.reset_mock()

    # Test fan mode update from value updated event
    event = Event(
        type="value updated",
        data={
            "source": "node",
            "event": "value updated",
            "nodeId": 68,
            "args": {
                "commandClassName": "Thermostat Fan Mode",
                "commandClass": CommandClass.THERMOSTAT_FAN_MODE.value,
                "endpoint": 0,
                "property": "mode",
                "newValue": 1,
                "prevValue": 0,
                "propertyName": "mode",
            },
        },
    )
    node.receive_event(event)
    state = hass.states.get(entity_id)
    assert state.attributes.get(ATTR_PRESET_MODE) == "Low"
    client.async_send_command.reset_mock()

    # Test fan mode update from value updated event for an unknown mode
    event = Event(
        type="value updated",
        data={
            "source": "node",
            "event": "value updated",
            "nodeId": 68,
            "args": {
                "commandClassName": "Thermostat Fan Mode",
                "commandClass": CommandClass.THERMOSTAT_FAN_MODE.value,
                "endpoint": 0,
                "property": "mode",
                "newValue": 79,
                "prevValue": 0,
                "propertyName": "mode",
            },
        },
    )
    node.receive_event(event)
    state = hass.states.get(entity_id)
    assert not state.attributes.get(ATTR_PRESET_MODE)
    client.async_send_command.reset_mock()

    # Test fan mode turned off update from value updated event
    event = Event(
        type="value updated",
        data={
            "source": "node",
            "event": "value updated",
            "nodeId": 68,
            "args": {
                "commandClassName": "Thermostat Fan Mode",
                "commandClass": CommandClass.THERMOSTAT_FAN_MODE.value,
                "endpoint": 0,
                "property": "off",
                "newValue": True,
                "prevValue": False,
                "propertyName": "off",
            },
        },
    )
    node.receive_event(event)
    state = hass.states.get(entity_id)
    assert state.state == STATE_OFF
async def test_thermostat_fan_without_off(
    hass, client, climate_radio_thermostat_ct100_plus, integration
):
    """Test the fan entity for a z-wave fan without "off" property."""
    entity_id = "fan.z_wave_thermostat"
    ent_reg = entity_registry.async_get(hass)

    # The entity starts out disabled by the integration, with no state.
    assert hass.states.get(entity_id) is None
    reg_entry = ent_reg.async_get(entity_id)
    assert reg_entry
    assert reg_entry.disabled
    assert reg_entry.disabled_by is entity_registry.RegistryEntryDisabler.INTEGRATION

    # Enable the entity and reload the config entry so it gets created.
    enabled_entry = ent_reg.async_update_entity(entity_id, disabled_by=None)
    assert enabled_entry != reg_entry
    assert enabled_entry.disabled is False
    await hass.config_entries.async_reload(integration.entry_id)
    await hass.async_block_till_done()

    state = hass.states.get(entity_id)
    assert state
    assert state.state == STATE_UNKNOWN

    # Without an "off" property, both turn_off and turn_on must raise
    # and send nothing to the driver.
    for fan_service in (SERVICE_TURN_OFF, SERVICE_TURN_ON):
        with pytest.raises(HomeAssistantError):
            await hass.services.async_call(
                FAN_DOMAIN,
                fan_service,
                {ATTR_ENTITY_ID: entity_id},
                blocking=True,
            )
        assert not client.async_send_command.call_args_list
        assert state.state == STATE_UNKNOWN
        client.async_send_command.reset_mock()
async def test_thermostat_fan_without_preset_modes(
    hass, client, climate_adc_t3000_missing_fan_mode_states, integration
):
    """Test the fan entity for a z-wave fan without "states" metadata."""
    entity_id = "fan.adc_t3000_missing_fan_mode_states"
    ent_reg = entity_registry.async_get(hass)

    # The entity starts out disabled by the integration, with no state.
    assert hass.states.get(entity_id) is None
    reg_entry = ent_reg.async_get(entity_id)
    assert reg_entry
    assert reg_entry.disabled
    assert reg_entry.disabled_by is entity_registry.RegistryEntryDisabler.INTEGRATION

    # Enable the entity and reload the config entry so it gets created.
    enabled_entry = ent_reg.async_update_entity(entity_id, disabled_by=None)
    assert enabled_entry != reg_entry
    assert enabled_entry.disabled is False
    await hass.config_entries.async_reload(integration.entry_id)
    await hass.async_block_till_done()

    state = hass.states.get(entity_id)
    assert state
    # Without "states" metadata, no preset modes can be exposed.
    assert not state.attributes.get(ATTR_PRESET_MODE)
    assert not state.attributes.get(ATTR_PRESET_MODES)
| 31.767303
| 88
| 0.600541
| 2,984
| 26,621
| 5.127346
| 0.077748
| 0.036078
| 0.042157
| 0.06183
| 0.876209
| 0.856993
| 0.837124
| 0.82085
| 0.80719
| 0.782876
| 0
| 0.019136
| 0.291349
| 26,621
| 837
| 89
| 31.805257
| 0.79189
| 0.041396
| 0
| 0.751111
| 0
| 0
| 0.147407
| 0.009949
| 0
| 0
| 0
| 0
| 0.155556
| 1
| 0
| false
| 0
| 0.017778
| 0
| 0.026667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
141a404fe0b759c276dc08d3d8a76fdbada2e512
| 5,506
|
py
|
Python
|
models.py
|
jurayev/fyyur
|
e7b5cf84191bf76f2e2c92b9eb90c05392a482b8
|
[
"MIT"
] | null | null | null |
models.py
|
jurayev/fyyur
|
e7b5cf84191bf76f2e2c92b9eb90c05392a482b8
|
[
"MIT"
] | null | null | null |
models.py
|
jurayev/fyyur
|
e7b5cf84191bf76f2e2c92b9eb90c05392a482b8
|
[
"MIT"
] | null | null | null |
from datetime import datetime, timezone
from config import db
class Show(db.Model):
    """A single show: an artist performing at a venue at a given time."""

    __tablename__ = 'show'
    id = db.Column(db.Integer, primary_key=True)
    venue_id = db.Column(db.Integer, db.ForeignKey('venue.id'), nullable=False)
    artist_id = db.Column(db.Integer, db.ForeignKey('artist.id'), nullable=False)
    start_time = db.Column(db.DateTime(timezone=True), nullable=False)

    def __repr__(self):
        return '<Show id: {}>'.format(self.id)

    @property
    def serialize(self):
        """Dict view of the show, including related venue/artist details."""
        return dict(
            id=self.id,
            venue_id=self.venue_id,
            venue_name=self.venue.name,
            artist_id=self.artist_id,
            artist_name=self.artist.name,
            artist_image_link=self.artist.image_link,
            start_time=self.start_time,
        )
class Venue(db.Model):
    """A performance venue that hosts shows."""

    __tablename__ = 'venue'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(120), nullable=False)
    city = db.Column(db.String(120), nullable=False)
    state = db.Column(db.String(120), nullable=False)
    address = db.Column(db.String(120), nullable=False)
    genres = db.Column(db.ARRAY(db.String(50)), nullable=False)
    phone = db.Column(db.String(120), nullable=False)
    image_link = db.Column(db.String(500), nullable=False)
    facebook_link = db.Column(db.String(120), nullable=True)
    website = db.Column(db.String(120), nullable=True)
    seeking_talent = db.Column(db.Boolean, nullable=False, default=False)
    seeking_description = db.Column(db.String(500), nullable=True)
    # Deleting a venue also deletes its shows (cascade="all, delete").
    shows = db.relationship("Show", cascade="all, delete", backref="venue")

    def __repr__(self):
        return f'<Venue id: {self.id}, name: {self.name}>'

    @property
    def serialize(self):
        """Full dict view, partitioning this venue's shows into past/upcoming."""
        past_shows = self._get_past_shows()
        upcoming_shows = self._get_upcoming_shows()
        return {
            'id': self.id,
            'name': self.name,
            'city': self.city,
            'state': self.state,
            'address': self.address,
            'genres': self.genres,
            'phone': self.phone,
            'image_link': self.image_link,
            'facebook_link': self.facebook_link,
            'website': self.website,
            'seeking_talent': self.seeking_talent,
            'seeking_description': self.seeking_description,
            'past_shows': past_shows,
            'past_shows_count': len(past_shows),
            'upcoming_shows': upcoming_shows,
            'upcoming_shows_count': len(upcoming_shows)
        }

    @property
    def serialize_by_name_id(self):
        """Compact dict view: id, name, and upcoming-show count."""
        return {
            'id': self.id,
            'name': self.name,
            'num_upcoming_shows': len(self._get_upcoming_shows())
        }

    @property
    def serialize_by_city_and_state(self):
        """Group dict view listing all venues sharing this city/state."""
        return {
            'city': self.city,
            'state': self.state,
            'venues': self._get_venues_by_city_and_state()
        }

    def _get_upcoming_shows(self):
        """Return shows starting at or after the current UTC instant."""
        # Read the clock once so every show is compared against the same
        # instant; the previous lambda re-evaluated datetime.now() per show,
        # which is slower and could classify shows inconsistently at the
        # boundary within a single call.
        now = datetime.now(timezone.utc)
        return [show for show in self.shows if show.start_time >= now]

    def _get_past_shows(self):
        """Return shows that started before the current UTC instant."""
        now = datetime.now(timezone.utc)
        return [show for show in self.shows if show.start_time < now]

    def _get_venues_by_city_and_state(self):
        """Query all venues in this venue's city/state, compactly serialized."""
        venues = Venue.query.filter_by(city=self.city, state=self.state).all()
        return [venue.serialize_by_name_id for venue in venues]
class Artist(db.Model):
    """A performing artist that plays shows at venues."""

    __tablename__ = 'artist'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(120), nullable=False)
    city = db.Column(db.String(120), nullable=False)
    state = db.Column(db.String(120), nullable=False)
    genres = db.Column(db.ARRAY(db.String(50)), nullable=False)
    phone = db.Column(db.String(120), nullable=False)
    image_link = db.Column(db.String(500), nullable=False)
    facebook_link = db.Column(db.String(120), nullable=True)
    website = db.Column(db.String(120), nullable=True)
    seeking_venue = db.Column(db.Boolean, nullable=False, default=False)
    seeking_description = db.Column(db.String(500), nullable=True)
    # Deleting an artist also deletes its shows (cascade="all, delete").
    shows = db.relationship("Show", cascade="all, delete", backref="artist")

    def __repr__(self):
        return f'<Artist id: {self.id}, name: {self.name}>'

    @property
    def serialize(self):
        """Full dict view, partitioning this artist's shows into past/upcoming."""
        past_shows = self._get_past_shows()
        upcoming_shows = self._get_upcoming_shows()
        return {
            'id': self.id,
            'name': self.name,
            'city': self.city,
            'state': self.state,
            'genres': self.genres,
            'phone': self.phone,
            'image_link': self.image_link,
            'facebook_link': self.facebook_link,
            'website': self.website,
            'seeking_venue': self.seeking_venue,
            'seeking_description': self.seeking_description,
            'past_shows': past_shows,
            'past_shows_count': len(past_shows),
            'upcoming_shows': upcoming_shows,
            'upcoming_shows_count': len(upcoming_shows)
        }

    @property
    def serialize_by_name_id(self):
        """Compact dict view: id, name, and upcoming-show count."""
        return {
            'id': self.id,
            'name': self.name,
            'num_upcoming_shows': len(self._get_upcoming_shows())
        }

    def _get_upcoming_shows(self):
        """Return shows starting at or after the current UTC instant."""
        # Read the clock once per call (see Venue._get_upcoming_shows for
        # the same fix): one consistent cutoff instead of one per show.
        now = datetime.now(timezone.utc)
        return [show for show in self.shows if show.start_time >= now]

    def _get_past_shows(self):
        """Return shows that started before the current UTC instant."""
        now = datetime.now(timezone.utc)
        return [show for show in self.shows if show.start_time < now]
| 35.986928
| 93
| 0.621685
| 690
| 5,506
| 4.73913
| 0.107246
| 0.066055
| 0.082569
| 0.08318
| 0.820183
| 0.792049
| 0.749541
| 0.730581
| 0.720489
| 0.720489
| 0
| 0.01325
| 0.246095
| 5,506
| 152
| 94
| 36.223684
| 0.774512
| 0
| 0
| 0.697674
| 0
| 0
| 0.110425
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.108527
| false
| 0
| 0.015504
| 0.085271
| 0.503876
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
141f403e44db479541c588540fc6deabe785bda7
| 10,948
|
py
|
Python
|
vsts/vsts/wiki/v4_1/wiki_client.py
|
kenkuo/azure-devops-python-api
|
9e920bd25e938fa89ff7f60153e5b9e113ca839d
|
[
"MIT"
] | null | null | null |
vsts/vsts/wiki/v4_1/wiki_client.py
|
kenkuo/azure-devops-python-api
|
9e920bd25e938fa89ff7f60153e5b9e113ca839d
|
[
"MIT"
] | null | null | null |
vsts/vsts/wiki/v4_1/wiki_client.py
|
kenkuo/azure-devops-python-api
|
9e920bd25e938fa89ff7f60153e5b9e113ca839d
|
[
"MIT"
] | null | null | null |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest import Serializer, Deserializer
from ...vss_client import VssClient
from . import models
class WikiClient(VssClient):
    """Wiki

    Client for the VSTS/Azure DevOps Wiki REST API (version 4.1).

    NOTE(review): this file is marked "Generated file, DO NOT EDIT" — any
    manual change will be lost when the client is regenerated.

    :param str base_url: Service URL
    :param Authentication creds: Authenticated credentials.
    """
    def __init__(self, base_url=None, creds=None):
        super(WikiClient, self).__init__(base_url, creds)
        # Register every class from the generated models module with the
        # msrest serializer/deserializer so responses map onto model types.
        client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
        self._serialize = Serializer(client_models)
        self._deserialize = Deserializer(client_models)

    # GUID identifying the Wiki resource area of the service.
    resource_area_identifier = 'bf7d82a0-8aa5-4613-94ef-6172a5ea01f3'

    def get_page_text(self, project, wiki_identifier, path=None, recursion_level=None, version_descriptor=None, include_content=None):
        """GetPageText.
        Gets metadata or content of the wiki page for the provided path. Content negotiation is done based on the `Accept` header sent in the request.
        :param str project: Project ID or project name
        :param str wiki_identifier: Wiki Id or name.
        :param str path: Wiki page path.
        :param str recursion_level: Recursion level for subpages retrieval. Defaults to `None` (Optional).
        :param :class:`<GitVersionDescriptor> <wiki.v4_1.models.GitVersionDescriptor>` version_descriptor: GitVersionDescriptor for the page. Defaults to the default branch (Optional).
        :param bool include_content: True to include the content of the page in the response for Json content type. Defaults to false (Optional)
        :rtype: object
        """
        route_values = {}
        if project is not None:
            route_values['project'] = self._serialize.url('project', project, 'str')
        if wiki_identifier is not None:
            route_values['wikiIdentifier'] = self._serialize.url('wiki_identifier', wiki_identifier, 'str')
        query_parameters = {}
        if path is not None:
            query_parameters['path'] = self._serialize.query('path', path, 'str')
        if recursion_level is not None:
            query_parameters['recursionLevel'] = self._serialize.query('recursion_level', recursion_level, 'str')
        # The version descriptor is flattened into dotted query parameters.
        if version_descriptor is not None:
            if version_descriptor.version_type is not None:
                query_parameters['versionDescriptor.versionType'] = version_descriptor.version_type
            if version_descriptor.version is not None:
                query_parameters['versionDescriptor.version'] = version_descriptor.version
            if version_descriptor.version_options is not None:
                query_parameters['versionDescriptor.versionOptions'] = version_descriptor.version_options
        if include_content is not None:
            query_parameters['includeContent'] = self._serialize.query('include_content', include_content, 'bool')
        response = self._send(http_method='GET',
                              location_id='25d3fbc7-fe3d-46cb-b5a5-0b6f79caf27b',
                              version='4.1',
                              route_values=route_values,
                              query_parameters=query_parameters)
        return self._deserialize('object', response)

    def get_page_zip(self, project, wiki_identifier, path=None, recursion_level=None, version_descriptor=None, include_content=None):
        """GetPageZip.
        Gets metadata or content of the wiki page for the provided path. Content negotiation is done based on the `Accept` header sent in the request.
        :param str project: Project ID or project name
        :param str wiki_identifier: Wiki Id or name.
        :param str path: Wiki page path.
        :param str recursion_level: Recursion level for subpages retrieval. Defaults to `None` (Optional).
        :param :class:`<GitVersionDescriptor> <wiki.v4_1.models.GitVersionDescriptor>` version_descriptor: GitVersionDescriptor for the page. Defaults to the default branch (Optional).
        :param bool include_content: True to include the content of the page in the response for Json content type. Defaults to false (Optional)
        :rtype: object
        """
        # NOTE(review): body is identical to get_page_text (same location_id
        # and parameters); presumably the text vs. zip response is selected by
        # the `Accept` header set elsewhere in the transport — confirm against
        # the generator/templates before relying on a behavioral difference.
        route_values = {}
        if project is not None:
            route_values['project'] = self._serialize.url('project', project, 'str')
        if wiki_identifier is not None:
            route_values['wikiIdentifier'] = self._serialize.url('wiki_identifier', wiki_identifier, 'str')
        query_parameters = {}
        if path is not None:
            query_parameters['path'] = self._serialize.query('path', path, 'str')
        if recursion_level is not None:
            query_parameters['recursionLevel'] = self._serialize.query('recursion_level', recursion_level, 'str')
        if version_descriptor is not None:
            if version_descriptor.version_type is not None:
                query_parameters['versionDescriptor.versionType'] = version_descriptor.version_type
            if version_descriptor.version is not None:
                query_parameters['versionDescriptor.version'] = version_descriptor.version
            if version_descriptor.version_options is not None:
                query_parameters['versionDescriptor.versionOptions'] = version_descriptor.version_options
        if include_content is not None:
            query_parameters['includeContent'] = self._serialize.query('include_content', include_content, 'bool')
        response = self._send(http_method='GET',
                              location_id='25d3fbc7-fe3d-46cb-b5a5-0b6f79caf27b',
                              version='4.1',
                              route_values=route_values,
                              query_parameters=query_parameters)
        return self._deserialize('object', response)

    def create_wiki(self, wiki_create_params, project=None):
        """CreateWiki.
        Creates the wiki resource.
        :param :class:`<WikiCreateParametersV2> <wiki.v4_1.models.WikiCreateParametersV2>` wiki_create_params: Parameters for the wiki creation.
        :param str project: Project ID or project name
        :rtype: :class:`<WikiV2> <wiki.v4_1.models.WikiV2>`
        """
        route_values = {}
        if project is not None:
            route_values['project'] = self._serialize.url('project', project, 'str')
        content = self._serialize.body(wiki_create_params, 'WikiCreateParametersV2')
        response = self._send(http_method='POST',
                              location_id='288d122c-dbd4-451d-aa5f-7dbbba070728',
                              version='4.1',
                              route_values=route_values,
                              content=content)
        return self._deserialize('WikiV2', response)

    def delete_wiki(self, wiki_identifier, project=None):
        """DeleteWiki.
        Deletes the wiki corresponding to the wiki name or Id provided.
        :param str wiki_identifier: Wiki name or Id.
        :param str project: Project ID or project name
        :rtype: :class:`<WikiV2> <wiki.v4_1.models.WikiV2>`
        """
        route_values = {}
        if project is not None:
            route_values['project'] = self._serialize.url('project', project, 'str')
        if wiki_identifier is not None:
            route_values['wikiIdentifier'] = self._serialize.url('wiki_identifier', wiki_identifier, 'str')
        response = self._send(http_method='DELETE',
                              location_id='288d122c-dbd4-451d-aa5f-7dbbba070728',
                              version='4.1',
                              route_values=route_values)
        return self._deserialize('WikiV2', response)

    def get_all_wikis(self, project=None):
        """GetAllWikis.
        Gets all wikis in a project or collection.
        :param str project: Project ID or project name
        :rtype: [WikiV2]
        """
        route_values = {}
        if project is not None:
            route_values['project'] = self._serialize.url('project', project, 'str')
        response = self._send(http_method='GET',
                              location_id='288d122c-dbd4-451d-aa5f-7dbbba070728',
                              version='4.1',
                              route_values=route_values,
                              returns_collection=True)
        return self._deserialize('[WikiV2]', response)

    def get_wiki(self, wiki_identifier, project=None):
        """GetWiki.
        Gets the wiki corresponding to the wiki name or Id provided.
        :param str wiki_identifier: Wiki name or id.
        :param str project: Project ID or project name
        :rtype: :class:`<WikiV2> <wiki.v4_1.models.WikiV2>`
        """
        route_values = {}
        if project is not None:
            route_values['project'] = self._serialize.url('project', project, 'str')
        if wiki_identifier is not None:
            route_values['wikiIdentifier'] = self._serialize.url('wiki_identifier', wiki_identifier, 'str')
        response = self._send(http_method='GET',
                              location_id='288d122c-dbd4-451d-aa5f-7dbbba070728',
                              version='4.1',
                              route_values=route_values)
        return self._deserialize('WikiV2', response)

    def update_wiki(self, update_parameters, wiki_identifier, project=None):
        """UpdateWiki.
        Updates the wiki corresponding to the wiki Id or name provided using the update parameters.
        :param :class:`<WikiUpdateParameters> <wiki.v4_1.models.WikiUpdateParameters>` update_parameters: Update parameters.
        :param str wiki_identifier: Wiki name or Id.
        :param str project: Project ID or project name
        :rtype: :class:`<WikiV2> <wiki.v4_1.models.WikiV2>`
        """
        route_values = {}
        if project is not None:
            route_values['project'] = self._serialize.url('project', project, 'str')
        if wiki_identifier is not None:
            route_values['wikiIdentifier'] = self._serialize.url('wiki_identifier', wiki_identifier, 'str')
        content = self._serialize.body(update_parameters, 'WikiUpdateParameters')
        response = self._send(http_method='PATCH',
                              location_id='288d122c-dbd4-451d-aa5f-7dbbba070728',
                              version='4.1',
                              route_values=route_values,
                              content=content)
        return self._deserialize('WikiV2', response)
| 56.725389
| 184
| 0.625776
| 1,196
| 10,948
| 5.544314
| 0.147993
| 0.054743
| 0.035289
| 0.025336
| 0.805308
| 0.790228
| 0.775449
| 0.768813
| 0.768813
| 0.762479
| 0
| 0.023086
| 0.260139
| 10,948
| 192
| 185
| 57.020833
| 0.795432
| 0.049233
| 0
| 0.79661
| 0
| 0
| 0.140604
| 0.066118
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.025424
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
142489c7ce8efda0cdb880e13bff17933d7ed7e0
| 104,226
|
py
|
Python
|
Uncertainty/data/case-de/case_de_129.py
|
thanever/SOC
|
9f30d1a9c7610a68de9c178a1170bdf1c8ca11d4
|
[
"MIT"
] | null | null | null |
Uncertainty/data/case-de/case_de_129.py
|
thanever/SOC
|
9f30d1a9c7610a68de9c178a1170bdf1c8ca11d4
|
[
"MIT"
] | null | null | null |
Uncertainty/data/case-de/case_de_129.py
|
thanever/SOC
|
9f30d1a9c7610a68de9c178a1170bdf1c8ca11d4
|
[
"MIT"
] | null | null | null |
from numpy import array
def case_de_129():
ppc = {"version": '2'}
ppc["baseMVA"] = 100.0
ppc["bus"] = array([
[75, 2, 116.01, 23.2, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[502, 2, 256.62, 51.32, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[44, 2, 158.16, 31.63, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[492, 2, 87.31, 17.46, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[180, 2, 50.65, 10.13, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[46, 1, 0, 0, 0, 0, 5, -2.0105050150091184e+17,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[21, 2, 1016.68, 203.34, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[33, 2, 209.31, 41.86, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[559, 2, 77.45, 15.49, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[18, 1, 0, 0, 0, 0, 5, -2.3858746735291997e+17,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[73, 2, 93.08, 18.62, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[503, 2, 78.59, 15.72, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[511, 2, 115.07, 23.01, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[72, 2, 290.75, 58.15, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[288, 2, 67.94, 13.59, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[339, 2, 169.7, 33.94, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[321, 2, 218.83, 43.77, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[8, 2, 168.11, 33.62, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[292, 2, 138.63, 27.73, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[343, 2, 123.44, 24.69, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[12, 1, 0, 0, 0, 0, 5, -4.434607059139137e+17,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[340, 2, 143.48, 28.7, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[177, 2, 29.53, 5.91, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[497, 2, 1072.31, 214.46, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[102, 2, 155.56, 31.11, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[311, 2, 213.82, 42.76, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[429, 2, 366.0, 73.2, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[32, 1, 0, 0, 0, 0, 5, -4.398680742766838e+17,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[22, 1, 0, 0, 0, 0, 5, -7.957342523030432e+17,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[101, 2, 80.37, 16.07, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[71, 2, 177.52, 35.5, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[558, 2, 144.71, 28.94, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[217, 2, 43.79, 8.76, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[322, 2, 27.86, 5.57, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[278, 2, 161.88, 32.38, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[498, 2, 50.29, 10.06, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[47, 2, 365.04, 73.01, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[346, 2, 335.96, 67.19, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[74, 1, 0, 0, 0, 0, 5, -403390353317533.4,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[557, 2, 245.42, 49.08, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[42, 1, 0, 0, 0, 0, 5, -1.415545919278442e+16,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[39, 3, 71.81, 14.36, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[45, 2, 83.95, 16.79, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[493, 2, 112.53, 22.51, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[98, 2, 113.5, 22.7, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[435, 2, 162.14, 32.43, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[338, 2, 274.38, 54.88, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[79, 2, 111.99, 22.4, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[78, 1, 0, 0, 0, 0, 5, -4.911995057919727e+16,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[512, 2, 76.01, 15.2, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[276, 2, 207.37, 41.47, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[569, 2, 200.82, 40.16, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[37, 1, 0, 0, 0, 0, 5, -5.003301708953094e+18,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[179, 2, 57.62, 11.52, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[325, 2, 166.91, 33.38, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[20, 1, 0, 0, 0, 0, 5, -1.601083737369627e+17,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[496, 2, 8.58, 1.72, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[279, 1, 0, 0, 0, 0, 5, -1.8506528552049567e+18,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[436, 2, 86.57, 17.31, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[345, 2, 338.41, 67.68, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[505, 2, 365.04, 73.01, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[290, 1, 0, 0, 0, 0, 5, -1.5714751080642217e+18,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[363, 2, 342.45, 68.49, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[11, 2, 99.61, 19.92, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[277, 1, 0, 0, 0, 0, 5, -104764816508018.31,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[441, 2, 63.82, 12.76, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[280, 1, 0, 0, 0, 0, 5, -3.5031948594884896e+17,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[504, 2, 51.47, 10.29, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[181, 2, 38.23, 7.65, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[291, 2, 70.32, 14.06, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[344, 2, 309.49, 61.9, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[40, 2, 75.01, 15.0, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[80, 2, 118.95, 23.79, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[183, 2, 518.4, 103.68, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[440, 2, 83.25, 16.65, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[43, 2, 123.62, 24.72, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[10, 1, 0, 0, 0, 0, 5, -2.0934653162760187e+18,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[81, 2, 134.28, 26.86, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[560, 2, 120.99, 24.2, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[341, 2, 129.71, 25.94, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[17, 2, 95.7, 19.14, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[41, 2, 80.61, 16.12, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[192, 2, 60.74, 12.15, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[342, 2, 225.0, 45.0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[218, 2, 133.41, 26.68, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[65, 2, 5.93, 1.19, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[289, 2, 106.86, 21.37, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[324, 2, 512.38, 102.48, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[48, 2, 250.92, 50.18, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[332, 1, 0, 0, 0, 0, 0, 1.877558334966159e+16,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[422, 2, 83.54, 16.71, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[23, 2, 133.12, 26.62, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[570, 2, 313.52, 62.7, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[38, 2, 219.29, 43.86, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[31, 2, 166.94, 33.39, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[49, 2, 63.47, 12.69, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[182, 2, 1.73, 0.35, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[9, 2, 113.69, 22.74, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[323, 2, 2.9, 0.58, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[400, 2, 60.77, 12.15, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[30, 1, 0, 0, 0, 0, 0, -3.6188081968897946e+17,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[25, 2, 63.67, 12.73, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ]
])
ppc["gen"] = array([
[102, 0, 0, 33.95, -8.49, 1.0, 100, 1, 67.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 101.85, 13.58, 20.37, 20.37, 27.16 ],
[493, 0, 0, 75.0, -18.75, 1.0, 100, 1, 150.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 225.0, 30.0, 45.0, 45.0, 60.0 ],
[493, 0, 0, 15.0, -3.75, 1.0, 100, 1, 30.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 45.0, 6.0, 9.0, 9.0, 12.0 ],
[177, 0, 0, 16.35, -4.09, 1.0, 100, 1, 32.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 49.05, 6.54, 9.81, 9.81, 13.08 ],
[180, 0, 0, 12.7, -3.18, 1.0, 100, 1, 25.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 38.1, 5.08, 7.62, 7.62, 10.16 ],
[180, 0, 0, 166.75, -41.69, 1.0, 100, 1, 333.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 500.25, 66.7, 100.05, 100.05, 133.4 ],
[180, 0, 0, 14.45, -3.61, 1.0, 100, 1, 28.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 43.35, 5.78, 8.67, 8.67, 11.56 ],
[183, 0, 0, 11.25, -2.81, 1.0, 100, 1, 22.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 33.75, 4.5, 6.75, 6.75, 9.0 ],
[183, 0, 0, 383.0, -95.75, 1.0, 100, 1, 766.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 1149.0, 153.2, 229.8, 229.8, 306.4 ],
[183, 0, 0, 19.0, -4.75, 1.0, 100, 1, 38.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 57.0, 7.6, 11.4, 11.4, 15.2 ],
[183, 0, 0, 12.0, -3.0, 1.0, 100, 1, 24.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 36.0, 4.8, 7.2, 7.2, 9.6 ],
[496, 0, 0, 26.4, -6.6, 1.0, 100, 1, 52.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 79.2, 10.56, 15.84, 15.84, 21.12 ],
[21, 0, 0, 63.5, -15.88, 1.0, 100, 1, 127.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 190.5, 25.4, 38.1, 38.1, 50.8 ],
[21, 0, 0, 97.0, -24.25, 1.0, 100, 1, 194.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 291.0, 38.8, 58.2, 58.2, 77.6 ],
[21, 0, 0, 8.2, -2.05, 1.0, 100, 1, 16.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 24.6, 3.28, 4.92, 4.92, 6.56 ],
[217, 0, 0, 54.0, -13.5, 1.0, 100, 1, 108.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 162.0, 21.6, 32.4, 32.4, 43.2 ],
[217, 0, 0, 254.0, -63.5, 1.0, 100, 1, 508.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 762.0, 101.6, 152.4, 152.4, 203.2 ],
[217, 0, 0, 8.5, -2.12, 1.0, 100, 1, 17.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 25.5, 3.4, 5.1, 5.1, 6.8 ],
[498, 0, 0, 149.25, -37.31, 1.0, 100, 1, 298.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 447.75, 59.7, 89.55, 89.55, 119.4 ],
[557, 0, 0, 45.4, -11.35, 1.0, 100, 1, 90.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 136.2, 18.16, 27.24, 27.24, 36.32 ],
[558, 0, 0, 37.0, -9.25, 1.0, 100, 1, 74.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 111.0, 14.8, 22.2, 22.2, 29.6 ],
[559, 0, 0, 8.5, -2.12, 1.0, 100, 1, 17.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 25.5, 3.4, 5.1, 5.1, 6.8 ],
[288, 0, 0, 7.85, -1.96, 1.0, 100, 1, 15.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 23.55, 3.14, 4.71, 4.71, 6.28 ],
[289, 0, 0, 552.5, -138.12, 1.0, 100, 1, 1105.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 1657.5, 221.0, 331.5, 331.5, 442.0 ],
[560, 0, 0, 10.15, -2.54, 1.0, 100, 1, 20.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 30.45, 4.06, 6.09, 6.09, 8.12 ],
[560, 0, 0, 108.0, -27.0, 1.0, 100, 1, 216.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 324.0, 43.2, 64.8, 64.8, 86.4 ],
[560, 0, 0, 29.5, -7.38, 1.0, 100, 1, 59.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 88.5, 11.8, 17.7, 17.7, 23.6 ],
[292, 0, 0, 6.75, -1.69, 1.0, 100, 1, 13.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 20.25, 2.7, 4.05, 4.05, 5.4 ],
[292, 0, 0, 5.6, -1.4, 1.0, 100, 1, 11.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 16.8, 2.24, 3.36, 3.36, 4.48 ],
[31, 0, 0, 97.7, -24.42, 1.0, 100, 1, 195.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 293.1, 39.08, 58.62, 58.62, 78.16 ],
[311, 0, 0, 437.5, -109.38, 1.0, 100, 1, 875.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 1312.5, 175.0, 262.5, 262.5, 350.0 ],
[321, 0, 0, 6.45, -1.61, 1.0, 100, 1, 12.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 19.35, 2.58, 3.87, 3.87, 5.16 ],
[324, 0, 0, 159.9, -39.98, 1.0, 100, 1, 319.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 479.7, 63.96, 95.94, 95.94, 127.92 ],
[325, 0, 0, 13.45, -3.36, 1.0, 100, 1, 26.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 40.35, 5.38, 8.07, 8.07, 10.76 ],
[502, 0, 0, 54.55, -13.64, 1.0, 100, 1, 109.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 163.65, 21.82, 32.73, 32.73, 43.64 ],
[33, 0, 0, 15.9, -3.98, 1.0, 100, 1, 31.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 47.7, 6.36, 9.54, 9.54, 12.72 ],
[570, 0, 0, 26.4, -6.6, 1.0, 100, 1, 52.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 79.2, 10.56, 15.84, 15.84, 21.12 ],
[570, 0, 0, 44.5, -11.12, 1.0, 100, 1, 89.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 133.5, 17.8, 26.7, 26.7, 35.6 ],
[338, 0, 0, 149.8, -37.45, 1.0, 100, 1, 299.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 449.4, 59.92, 89.88, 89.88, 119.84 ],
[338, 0, 0, 41.25, -10.31, 1.0, 100, 1, 82.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 123.75, 16.5, 24.75, 24.75, 33.0 ],
[339, 0, 0, 67.0, -16.75, 1.0, 100, 1, 134.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 201.0, 26.8, 40.2, 40.2, 53.6 ],
[339, 0, 0, 79.5, -19.88, 1.0, 100, 1, 159.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 238.5, 31.8, 47.7, 47.7, 63.6 ],
[339, 0, 0, 55.5, -13.88, 1.0, 100, 1, 111.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 166.5, 22.2, 33.3, 33.3, 44.4 ],
[339, 0, 0, 21.35, -5.34, 1.0, 100, 1, 42.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 64.05, 8.54, 12.81, 12.81, 17.08 ],
[339, 0, 0, 29.0, -7.25, 1.0, 100, 1, 58.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 87.0, 11.6, 17.4, 17.4, 23.2 ],
[340, 0, 0, 9.75, -2.44, 1.0, 100, 1, 19.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 29.25, 3.9, 5.85, 5.85, 7.8 ],
[342, 0, 0, 34.98, -8.74, 1.0, 100, 1, 69.95, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 104.93, 13.99, 20.98, 20.98, 27.98 ],
[345, 0, 0, 105.5, -26.38, 1.0, 100, 1, 211.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 316.5, 42.2, 63.3, 63.3, 84.4 ],
[345, 0, 0, 44.5, -11.12, 1.0, 100, 1, 89.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 133.5, 17.8, 26.7, 26.7, 35.6 ],
[345, 0, 0, 163.5, -40.88, 1.0, 100, 1, 327.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 490.5, 65.4, 98.1, 98.1, 130.8 ],
[346, 0, 0, 229.45, -57.36, 1.0, 100, 1, 458.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 688.35, 91.78, 137.67, 137.67, 183.56 ],
[363, 0, 0, 40.9, -10.22, 1.0, 100, 1, 81.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 122.7, 16.36, 24.54, 24.54, 32.72 ],
[363, 0, 0, 344.0, -86.0, 1.0, 100, 1, 688.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 1032.0, 137.6, 206.4, 206.4, 275.2 ],
[363, 0, 0, 18.0, -4.5, 1.0, 100, 1, 36.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 54.0, 7.2, 10.8, 10.8, 14.4 ],
[503, 0, 0, 26.0, -6.5, 1.0, 100, 1, 52.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 78.0, 10.4, 15.6, 15.6, 20.8 ],
[503, 0, 0, 680.0, -170.0, 1.0, 100, 1, 1360.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 2040.0, 272.0, 408.0, 408.0, 544.0 ],
[503, 0, 0, 29.2, -7.3, 1.0, 100, 1, 58.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 87.6, 11.68, 17.52, 17.52, 23.36 ],
[39, 0, 0, 1149.65, -287.41, 1.0, 100, 1, 2299.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 3448.95, 459.86, 689.79, 689.79, 919.72 ],
[40, 0, 0, 24.0, -6.0, 1.0, 100, 1, 48.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 72.0, 9.6, 14.4, 14.4, 19.2 ],
[400, 0, 0, 44.0, -11.0, 1.0, 100, 1, 88.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 132.0, 17.6, 26.4, 26.4, 35.2 ],
[400, 0, 0, 30.0, -7.5, 1.0, 100, 1, 60.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 90.0, 12.0, 18.0, 18.0, 24.0 ],
[400, 0, 0, 79.0, -19.75, 1.0, 100, 1, 158.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 237.0, 31.6, 47.4, 47.4, 63.2 ],
[422, 0, 0, 37.0, -9.25, 1.0, 100, 1, 74.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 111.0, 14.8, 22.2, 22.2, 29.6 ],
[43, 0, 0, 98.0, -24.5, 1.0, 100, 1, 196.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 294.0, 39.2, 58.8, 58.8, 78.4 ],
[43, 0, 0, 8.5, -2.12, 1.0, 100, 1, 17.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 25.5, 3.4, 5.1, 5.1, 6.8 ],
[429, 0, 0, 82.0, -20.5, 1.0, 100, 1, 164.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 246.0, 32.8, 49.2, 49.2, 65.6 ],
[44, 0, 0, 13.0, -3.25, 1.0, 100, 1, 26.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 39.0, 5.2, 7.8, 7.8, 10.4 ],
[435, 0, 0, 91.0, -22.75, 1.0, 100, 1, 182.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 273.0, 36.4, 54.6, 54.6, 72.8 ],
[435, 0, 0, 30.75, -7.69, 1.0, 100, 1, 61.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 92.25, 12.3, 18.45, 18.45, 24.6 ],
[436, 0, 0, 13.25, -3.31, 1.0, 100, 1, 26.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 39.75, 5.3, 7.95, 7.95, 10.6 ],
[440, 0, 0, 7.35, -1.84, 1.0, 100, 1, 14.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 22.05, 2.94, 4.41, 4.41, 5.88 ],
[441, 0, 0, 37.5, -9.38, 1.0, 100, 1, 75.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 112.5, 15.0, 22.5, 22.5, 30.0 ],
[45, 0, 0, 148.0, -37.0, 1.0, 100, 1, 296.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 444.0, 59.2, 88.8, 88.8, 118.4 ],
[45, 0, 0, 11.55, -2.89, 1.0, 100, 1, 23.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 34.65, 4.62, 6.93, 6.93, 9.24 ],
[47, 0, 0, 222.0, -55.5, 1.0, 100, 1, 444.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 666.0, 88.8, 133.2, 133.2, 177.6 ],
[47, 0, 0, 15.85, -3.96, 1.0, 100, 1, 31.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 47.55, 6.34, 9.51, 9.51, 12.68 ],
[49, 0, 0, 176.0, -44.0, 1.0, 100, 1, 352.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 528.0, 70.4, 105.6, 105.6, 140.8 ],
[49, 0, 0, 33.0, -8.25, 1.0, 100, 1, 66.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 99.0, 13.2, 19.8, 19.8, 26.4 ],
[49, 0, 0, 18.75, -4.69, 1.0, 100, 1, 37.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 56.25, 7.5, 11.25, 11.25, 15.0 ],
[65, 0, 0, 82.25, -20.56, 1.0, 100, 1, 164.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 246.75, 32.9, 49.35, 49.35, 65.8 ],
[71, 0, 0, 24.5, -6.12, 1.0, 100, 1, 49.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 73.5, 9.8, 14.7, 14.7, 19.6 ],
[71, 0, 0, 80.55, -20.14, 1.0, 100, 1, 161.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 241.65, 32.22, 48.33, 48.33, 64.44 ],
[71, 0, 0, 4.95, -1.24, 1.0, 100, 1, 9.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 14.85, 1.98, 2.97, 2.97, 3.96 ],
[72, 0, 0, 450.0, -112.5, 1.0, 100, 1, 900.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 1350.0, 180.0, 270.0, 270.0, 360.0 ],
[72, 0, 0, 75.5, -18.88, 1.0, 100, 1, 151.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 226.5, 30.2, 45.3, 45.3, 60.4 ],
[72, 0, 0, 60.0, -15.0, 1.0, 100, 1, 120.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 180.0, 24.0, 36.0, 36.0, 48.0 ],
[511, 0, 0, 61.0, -15.25, 1.0, 100, 1, 122.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 183.0, 24.4, 36.6, 36.6, 48.8 ],
[511, 0, 0, 11.65, -2.91, 1.0, 100, 1, 23.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 34.95, 4.66, 6.99, 6.99, 9.32 ],
[75, 0, 0, 24.5, -6.12, 1.0, 100, 1, 49.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 73.5, 9.8, 14.7, 14.7, 19.6 ],
[79, 0, 0, 375.0, -93.75, 1.0, 100, 1, 750.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 1125.0, 150.0, 225.0, 225.0, 300.0 ],
[79, 0, 0, 9.35, -2.34, 1.0, 100, 1, 18.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 28.05, 3.74, 5.61, 5.61, 7.48 ],
[81, 0, 0, 1417.5, -354.38, 1.0, 100, 1, 2835.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 4252.5, 567.0, 850.5, 850.5, 1134.0 ],
[81, 0, 0, 62.25, -15.56, 1.0, 100, 1, 124.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 186.75, 24.9, 37.35, 37.35, 49.8 ],
[218, 0, 0, 23.05, -5.76, 1.0, 100, 1, 46.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 69.15, 9.22, 13.83, 13.83, 18.44 ],
[498, 0, 0, 685.85, -171.46, 1.0, 100, 1, 1371.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 2057.55, 274.34, 411.51, 411.51, 548.68 ],
[8, 0, 0, 22.83, -5.71, 1.0, 100, 1, 45.66, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 68.49, 9.13, 13.7, 13.7, 18.26 ],
[9, 0, 0, 20.58, -5.14, 1.0, 100, 1, 41.15, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 61.73, 8.23, 12.35, 12.35, 16.46 ],
[11, 0, 0, 57.03, -14.26, 1.0, 100, 1, 114.06, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 171.1, 22.81, 34.22, 34.22, 45.63 ],
[17, 0, 0, 22.05, -5.51, 1.0, 100, 1, 44.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 66.15, 8.82, 13.23, 13.23, 17.64 ],
[21, 0, 0, 8.11, -2.03, 1.0, 100, 1, 16.22, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 24.34, 3.24, 4.87, 4.87, 6.49 ],
[23, 0, 0, 13.69, -3.42, 1.0, 100, 1, 27.37, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 41.06, 5.47, 8.21, 8.21, 10.95 ],
[25, 0, 0, 27.9, -6.97, 1.0, 100, 1, 55.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 83.7, 11.16, 16.74, 16.74, 22.32 ],
[31, 0, 0, 17.48, -4.37, 1.0, 100, 1, 34.95, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 52.43, 6.99, 10.49, 10.49, 13.98 ],
[33, 0, 0, 132.34, -33.09, 1.0, 100, 1, 264.69, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 397.03, 52.94, 79.41, 79.41, 105.87 ],
[38, 0, 0, 1.18, -0.3, 1.0, 100, 1, 2.36, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 3.54, 0.47, 0.71, 0.71, 0.94 ],
[39, 0, 0, 176.96, -44.24, 1.0, 100, 1, 353.92, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 530.88, 70.78, 106.18, 106.18, 141.57 ],
[40, 0, 0, 192.44, -48.11, 1.0, 100, 1, 384.87, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 577.31, 76.97, 115.46, 115.46, 153.95 ],
[41, 0, 0, 371.6, -92.9, 1.0, 100, 1, 743.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 1114.79, 148.64, 222.96, 222.96, 297.28 ],
[43, 0, 0, 167.23, -41.81, 1.0, 100, 1, 334.46, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 501.69, 66.89, 100.34, 100.34, 133.78 ],
[44, 0, 0, 4.2, -1.05, 1.0, 100, 1, 8.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 12.59, 1.68, 2.52, 2.52, 3.36 ],
[45, 0, 0, 68.54, -17.14, 1.0, 100, 1, 137.08, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 205.62, 27.42, 41.12, 41.12, 54.83 ],
[47, 0, 0, 1.17, -0.29, 1.0, 100, 1, 2.33, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 3.5, 0.47, 0.7, 0.7, 0.93 ],
[48, 0, 0, 3.5, -0.88, 1.0, 100, 1, 7.01, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 10.51, 1.4, 2.1, 2.1, 2.8 ],
[49, 0, 0, 59.17, -14.79, 1.0, 100, 1, 118.34, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 177.51, 23.67, 35.5, 35.5, 47.34 ],
[65, 0, 0, 9.0, -2.25, 1.0, 100, 1, 18.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 27.0, 3.6, 5.4, 5.4, 7.2 ],
[71, 0, 0, 124.07, -31.02, 1.0, 100, 1, 248.13, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 372.2, 49.63, 74.44, 74.44, 99.25 ],
[72, 0, 0, 176.86, -44.21, 1.0, 100, 1, 353.72, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 530.58, 70.74, 106.12, 106.12, 141.49 ],
[73, 0, 0, 254.18, -63.54, 1.0, 100, 1, 508.36, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 762.53, 101.67, 152.51, 152.51, 203.34 ],
[75, 0, 0, 194.0, -48.5, 1.0, 100, 1, 387.99, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 581.99, 77.6, 116.4, 116.4, 155.2 ],
[79, 0, 0, 57.98, -14.5, 1.0, 100, 1, 115.96, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 173.95, 23.19, 34.79, 34.79, 46.39 ],
[80, 0, 0, 18.38, -4.6, 1.0, 100, 1, 36.76, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 55.14, 7.35, 11.03, 11.03, 14.7 ],
[81, 0, 0, 176.21, -44.05, 1.0, 100, 1, 352.41, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 528.62, 70.48, 105.72, 105.72, 140.96 ],
[98, 0, 0, 34.74, -8.69, 1.0, 100, 1, 69.48, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 104.22, 13.9, 20.84, 20.84, 27.79 ],
[101, 0, 0, 120.76, -30.19, 1.0, 100, 1, 241.53, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 362.29, 48.31, 72.46, 72.46, 96.61 ],
[102, 0, 0, 73.71, -18.43, 1.0, 100, 1, 147.43, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 221.14, 29.49, 44.23, 44.23, 58.97 ],
[177, 0, 0, 103.42, -25.85, 1.0, 100, 1, 206.83, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 310.25, 41.37, 62.05, 62.05, 82.73 ],
[179, 0, 0, 225.18, -56.3, 1.0, 100, 1, 450.36, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 675.54, 90.07, 135.11, 135.11, 180.14 ],
[180, 0, 0, 144.9, -36.23, 1.0, 100, 1, 289.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 434.7, 57.96, 86.94, 86.94, 115.92 ],
[181, 0, 0, 134.42, -33.61, 1.0, 100, 1, 268.84, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 403.26, 53.77, 80.65, 80.65, 107.54 ],
[182, 0, 0, 25.68, -6.42, 1.0, 100, 1, 51.36, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 77.04, 10.27, 15.41, 15.41, 20.54 ],
[183, 0, 0, 18.67, -4.67, 1.0, 100, 1, 37.35, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 56.02, 7.47, 11.2, 11.2, 14.94 ],
[192, 0, 0, 99.66, -24.92, 1.0, 100, 1, 199.32, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 298.99, 39.86, 59.8, 59.8, 79.73 ],
[217, 0, 0, 63.31, -15.83, 1.0, 100, 1, 126.62, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 189.94, 25.32, 37.99, 37.99, 50.65 ],
[218, 0, 0, 37.87, -9.47, 1.0, 100, 1, 75.73, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 113.6, 15.15, 22.72, 22.72, 30.29 ],
[276, 0, 0, 107.85, -26.96, 1.0, 100, 1, 215.69, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 323.54, 43.14, 64.71, 64.71, 86.28 ],
[278, 0, 0, 174.92, -43.73, 1.0, 100, 1, 349.85, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 524.77, 69.97, 104.95, 104.95, 139.94 ],
[288, 0, 0, 37.79, -9.45, 1.0, 100, 1, 75.58, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 113.36, 15.12, 22.67, 22.67, 30.23 ],
[289, 0, 0, 15.34, -3.83, 1.0, 100, 1, 30.68, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 46.02, 6.14, 9.2, 9.2, 12.27 ],
[291, 0, 0, 11.46, -2.87, 1.0, 100, 1, 22.93, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 34.39, 4.59, 6.88, 6.88, 9.17 ],
[292, 0, 0, 6.99, -1.75, 1.0, 100, 1, 13.97, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 20.96, 2.79, 4.19, 4.19, 5.59 ],
[311, 0, 0, 16.02, -4.0, 1.0, 100, 1, 32.04, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 48.05, 6.41, 9.61, 9.61, 12.81 ],
[321, 0, 0, 24.65, -6.16, 1.0, 100, 1, 49.31, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 73.96, 9.86, 14.79, 14.79, 19.72 ],
[322, 0, 0, 49.28, -12.32, 1.0, 100, 1, 98.56, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 147.84, 19.71, 29.57, 29.57, 39.43 ],
[323, 0, 0, 12.77, -3.19, 1.0, 100, 1, 25.54, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 38.31, 5.11, 7.66, 7.66, 10.22 ],
[324, 0, 0, 30.95, -7.74, 1.0, 100, 1, 61.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 92.84, 12.38, 18.57, 18.57, 24.76 ],
[325, 0, 0, 79.05, -19.76, 1.0, 100, 1, 158.09, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 237.14, 31.62, 47.43, 47.43, 63.24 ],
[338, 0, 0, 26.94, -6.73, 1.0, 100, 1, 53.88, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 80.82, 10.78, 16.16, 16.16, 21.55 ],
[339, 0, 0, 60.18, -15.05, 1.0, 100, 1, 120.36, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 180.55, 24.07, 36.11, 36.11, 48.15 ],
[340, 0, 0, 70.54, -17.64, 1.0, 100, 1, 141.08, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 211.62, 28.22, 42.32, 42.32, 56.43 ],
[341, 0, 0, 1.8, -0.45, 1.0, 100, 1, 3.59, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 5.39, 0.72, 1.08, 1.08, 1.44 ],
[342, 0, 0, 45.47, -11.37, 1.0, 100, 1, 90.94, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 136.41, 18.19, 27.28, 27.28, 36.38 ],
[343, 0, 0, 19.45, -4.86, 1.0, 100, 1, 38.91, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 58.36, 7.78, 11.67, 11.67, 15.56 ],
[344, 0, 0, 1.01, -0.25, 1.0, 100, 1, 2.01, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 3.02, 0.4, 0.6, 0.6, 0.8 ],
[345, 0, 0, 2.19, -0.55, 1.0, 100, 1, 4.38, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 6.57, 0.88, 1.31, 1.31, 1.75 ],
[346, 0, 0, 2.38, -0.6, 1.0, 100, 1, 4.76, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 7.14, 0.95, 1.43, 1.43, 1.9 ],
[363, 0, 0, 1.37, -0.34, 1.0, 100, 1, 2.75, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 4.12, 0.55, 0.82, 0.82, 1.1 ],
[400, 0, 0, 51.11, -12.78, 1.0, 100, 1, 102.22, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 153.32, 20.44, 30.66, 30.66, 40.89 ],
[422, 0, 0, 34.74, -8.68, 1.0, 100, 1, 69.48, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 104.22, 13.9, 20.84, 20.84, 27.79 ],
[429, 0, 0, 0.5, -0.12, 1.0, 100, 1, 1.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 1.5, 0.2, 0.3, 0.3, 0.4 ],
[435, 0, 0, 85.56, -21.39, 1.0, 100, 1, 171.11, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 256.67, 34.22, 51.33, 51.33, 68.45 ],
[436, 0, 0, 16.75, -4.19, 1.0, 100, 1, 33.51, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 50.26, 6.7, 10.05, 10.05, 13.4 ],
[440, 0, 0, 111.25, -27.81, 1.0, 100, 1, 222.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 333.75, 44.5, 66.75, 66.75, 89.0 ],
[441, 0, 0, 115.58, -28.89, 1.0, 100, 1, 231.16, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 346.73, 46.23, 69.35, 69.35, 92.46 ],
[492, 0, 0, 199.63, -49.91, 1.0, 100, 1, 399.25, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 598.88, 79.85, 119.78, 119.78, 159.7 ],
[493, 0, 0, 141.59, -35.4, 1.0, 100, 1, 283.17, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 424.76, 56.63, 84.95, 84.95, 113.27 ],
[496, 0, 0, 108.47, -27.12, 1.0, 100, 1, 216.94, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 325.41, 43.39, 65.08, 65.08, 86.77 ],
[497, 0, 0, 2.57, -0.64, 1.0, 100, 1, 5.14, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 7.71, 1.03, 1.54, 1.54, 2.06 ],
[498, 0, 0, 441.55, -110.39, 1.0, 100, 1, 883.11, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 1324.66, 176.62, 264.93, 264.93, 353.24 ],
[502, 0, 0, 109.66, -27.42, 1.0, 100, 1, 219.33, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 328.99, 43.87, 65.8, 65.8, 87.73 ],
[503, 0, 0, 132.82, -33.21, 1.0, 100, 1, 265.65, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 398.47, 53.13, 79.69, 79.69, 106.26 ],
[504, 0, 0, 38.74, -9.69, 1.0, 100, 1, 77.48, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 116.23, 15.5, 23.25, 23.25, 30.99 ],
[505, 0, 0, 0.34, -0.08, 1.0, 100, 1, 0.68, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 1.01, 0.14, 0.2, 0.2, 0.27 ],
[511, 0, 0, 353.58, -88.39, 1.0, 100, 1, 707.15, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 1060.73, 141.43, 212.15, 212.15, 282.86 ],
[512, 0, 0, 43.36, -10.84, 1.0, 100, 1, 86.71, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 130.07, 17.34, 26.01, 26.01, 34.68 ],
[557, 0, 0, 39.54, -9.88, 1.0, 100, 1, 79.07, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 118.61, 15.81, 23.72, 23.72, 31.63 ],
[558, 0, 0, 107.15, -26.79, 1.0, 100, 1, 214.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 321.45, 42.86, 64.29, 64.29, 85.72 ],
[559, 0, 0, 35.77, -8.94, 1.0, 100, 1, 71.53, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 107.3, 14.31, 21.46, 21.46, 28.61 ],
[560, 0, 0, 131.41, -32.85, 1.0, 100, 1, 262.81, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 394.22, 52.56, 78.84, 78.84, 105.13 ],
[569, 0, 0, 17.89, -4.47, 1.0, 100, 1, 35.77, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 53.66, 7.15, 10.73, 10.73, 14.31 ],
[570, 0, 0, 84.3, -21.08, 1.0, 100, 1, 168.61, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 252.91, 33.72, 50.58, 50.58, 67.44 ],
[8, 0, 0, 0.99, -0.25, 1.0, 100, 1, 1.99, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 2.98, 0.4, 0.6, 0.6, 0.8 ],
[9, 0, 0, 0.69, -0.17, 1.0, 100, 1, 1.37, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 2.06, 0.27, 0.41, 0.41, 0.55 ],
[11, 0, 0, 1.61, -0.4, 1.0, 100, 1, 3.22, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 4.82, 0.64, 0.96, 0.96, 1.29 ],
[17, 0, 0, 1.08, -0.27, 1.0, 100, 1, 2.16, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 3.24, 0.43, 0.65, 0.65, 0.86 ],
[21, 0, 0, 0.58, -0.15, 1.0, 100, 1, 1.16, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 1.75, 0.23, 0.35, 0.35, 0.47 ],
[23, 0, 0, 0.7, -0.18, 1.0, 100, 1, 1.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 2.1, 0.28, 0.42, 0.42, 0.56 ],
[25, 0, 0, 0.56, -0.14, 1.0, 100, 1, 1.12, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 1.68, 0.22, 0.34, 0.34, 0.45 ],
[31, 0, 0, 1.18, -0.3, 1.0, 100, 1, 2.37, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 3.55, 0.47, 0.71, 0.71, 0.95 ],
[33, 0, 0, 3.51, -0.88, 1.0, 100, 1, 7.02, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 10.53, 1.4, 2.11, 2.11, 2.81 ],
[38, 0, 0, 0.18, -0.04, 1.0, 100, 1, 0.36, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.54, 0.07, 0.11, 0.11, 0.14 ],
[39, 0, 0, 1.88, -0.47, 1.0, 100, 1, 3.75, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 5.63, 0.75, 1.13, 1.13, 1.5 ],
[40, 0, 0, 3.8, -0.95, 1.0, 100, 1, 7.59, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 11.39, 1.52, 2.28, 2.28, 3.04 ],
[41, 0, 0, 4.23, -1.06, 1.0, 100, 1, 8.47, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 12.7, 1.69, 2.54, 2.54, 3.39 ],
[43, 0, 0, 6.21, -1.55, 1.0, 100, 1, 12.41, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 18.62, 2.48, 3.72, 3.72, 4.96 ],
[44, 0, 0, 0.26, -0.06, 1.0, 100, 1, 0.51, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.77, 0.1, 0.15, 0.15, 0.21 ],
[45, 0, 0, 0.9, -0.22, 1.0, 100, 1, 1.79, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 2.69, 0.36, 0.54, 0.54, 0.72 ],
[47, 0, 0, 0.06, -0.02, 1.0, 100, 1, 0.13, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.19, 0.03, 0.04, 0.04, 0.05 ],
[48, 0, 0, 0.13, -0.03, 1.0, 100, 1, 0.25, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.38, 0.05, 0.08, 0.08, 0.1 ],
[49, 0, 0, 4.63, -1.16, 1.0, 100, 1, 9.26, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 13.89, 1.85, 2.78, 2.78, 3.7 ],
[65, 0, 0, 0.24, -0.06, 1.0, 100, 1, 0.48, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.72, 0.1, 0.14, 0.14, 0.19 ],
[71, 0, 0, 8.34, -2.08, 1.0, 100, 1, 16.68, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 25.02, 3.34, 5.0, 5.0, 6.67 ],
[72, 0, 0, 22.69, -5.67, 1.0, 100, 1, 45.38, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 68.07, 9.08, 13.61, 13.61, 18.15 ],
[73, 0, 0, 2.23, -0.56, 1.0, 100, 1, 4.47, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 6.7, 0.89, 1.34, 1.34, 1.79 ],
[75, 0, 0, 36.62, -9.16, 1.0, 100, 1, 73.25, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 109.87, 14.65, 21.97, 21.97, 29.3 ],
[79, 0, 0, 2.0, -0.5, 1.0, 100, 1, 4.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 6.0, 0.8, 1.2, 1.2, 1.6 ],
[80, 0, 0, 0.57, -0.14, 1.0, 100, 1, 1.15, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 1.72, 0.23, 0.34, 0.34, 0.46 ],
[81, 0, 0, 3.88, -0.97, 1.0, 100, 1, 7.76, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 11.64, 1.55, 2.33, 2.33, 3.1 ],
[98, 0, 0, 0.75, -0.19, 1.0, 100, 1, 1.51, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 2.26, 0.3, 0.45, 0.45, 0.6 ],
[101, 0, 0, 1.92, -0.48, 1.0, 100, 1, 3.85, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 5.77, 0.77, 1.15, 1.15, 1.54 ],
[102, 0, 0, 2.18, -0.55, 1.0, 100, 1, 4.37, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 6.55, 0.87, 1.31, 1.31, 1.75 ],
[177, 0, 0, 1.29, -0.32, 1.0, 100, 1, 2.59, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 3.88, 0.52, 0.78, 0.78, 1.03 ],
[179, 0, 0, 3.31, -0.83, 1.0, 100, 1, 6.63, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 9.94, 1.33, 1.99, 1.99, 2.65 ],
[180, 0, 0, 3.63, -0.91, 1.0, 100, 1, 7.26, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 10.88, 1.45, 2.18, 2.18, 2.9 ],
[181, 0, 0, 1.3, -0.32, 1.0, 100, 1, 2.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 3.89, 0.52, 0.78, 0.78, 1.04 ],
[182, 0, 0, 0.26, -0.07, 1.0, 100, 1, 0.53, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.79, 0.11, 0.16, 0.16, 0.21 ],
[183, 0, 0, 0.77, -0.19, 1.0, 100, 1, 1.53, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 2.3, 0.31, 0.46, 0.46, 0.61 ],
[192, 0, 0, 2.48, -0.62, 1.0, 100, 1, 4.97, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 7.45, 0.99, 1.49, 1.49, 1.99 ],
[217, 0, 0, 0.51, -0.13, 1.0, 100, 1, 1.02, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 1.53, 0.2, 0.31, 0.31, 0.41 ],
[218, 0, 0, 0.73, -0.18, 1.0, 100, 1, 1.45, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 2.18, 0.29, 0.44, 0.44, 0.58 ],
[276, 0, 0, 7.59, -1.9, 1.0, 100, 1, 15.19, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 22.78, 3.04, 4.56, 4.56, 6.07 ],
[278, 0, 0, 2.61, -0.65, 1.0, 100, 1, 5.22, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 7.84, 1.04, 1.57, 1.57, 2.09 ],
[288, 0, 0, 3.09, -0.77, 1.0, 100, 1, 6.17, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 9.26, 1.23, 1.85, 1.85, 2.47 ],
[289, 0, 0, 2.26, -0.56, 1.0, 100, 1, 4.51, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 6.77, 0.9, 1.35, 1.35, 1.8 ],
[291, 0, 0, 3.77, -0.94, 1.0, 100, 1, 7.55, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 11.32, 1.51, 2.26, 2.26, 3.02 ],
[292, 0, 0, 1.94, -0.48, 1.0, 100, 1, 3.88, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 5.82, 0.78, 1.16, 1.16, 1.55 ],
[311, 0, 0, 2.16, -0.54, 1.0, 100, 1, 4.32, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 6.48, 0.86, 1.3, 1.3, 1.73 ],
[321, 0, 0, 5.48, -1.37, 1.0, 100, 1, 10.96, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 16.44, 2.19, 3.29, 3.29, 4.38 ],
[322, 0, 0, 4.39, -1.1, 1.0, 100, 1, 8.77, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 13.16, 1.75, 2.63, 2.63, 3.51 ],
[323, 0, 0, 1.5, -0.38, 1.0, 100, 1, 3.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 4.5, 0.6, 0.9, 0.9, 1.2 ],
[324, 0, 0, 1.74, -0.44, 1.0, 100, 1, 3.49, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 5.23, 0.7, 1.05, 1.05, 1.39 ],
[325, 0, 0, 1.9, -0.48, 1.0, 100, 1, 3.81, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 5.71, 0.76, 1.14, 1.14, 1.52 ],
[338, 0, 0, 2.39, -0.6, 1.0, 100, 1, 4.78, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 7.17, 0.96, 1.43, 1.43, 1.91 ],
[339, 0, 0, 8.66, -2.17, 1.0, 100, 1, 17.33, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 25.99, 3.47, 5.2, 5.2, 6.93 ],
[340, 0, 0, 4.7, -1.18, 1.0, 100, 1, 9.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 14.11, 1.88, 2.82, 2.82, 3.76 ],
[341, 0, 0, 0.83, -0.21, 1.0, 100, 1, 1.65, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 2.48, 0.33, 0.5, 0.5, 0.66 ],
[342, 0, 0, 1.48, -0.37, 1.0, 100, 1, 2.97, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 4.45, 0.59, 0.89, 0.89, 1.19 ],
[343, 0, 0, 7.26, -1.82, 1.0, 100, 1, 14.52, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 21.78, 2.9, 4.36, 4.36, 5.81 ],
[344, 0, 0, 0.09, -0.02, 1.0, 100, 1, 0.17, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.26, 0.03, 0.05, 0.05, 0.07 ],
[345, 0, 0, 0.12, -0.03, 1.0, 100, 1, 0.24, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.36, 0.05, 0.07, 0.07, 0.1 ],
[346, 0, 0, 0.13, -0.03, 1.0, 100, 1, 0.25, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.38, 0.05, 0.08, 0.08, 0.1 ],
[363, 0, 0, 0.13, -0.03, 1.0, 100, 1, 0.25, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.38, 0.05, 0.08, 0.08, 0.1 ],
[400, 0, 0, 1.32, -0.33, 1.0, 100, 1, 2.65, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 3.97, 0.53, 0.79, 0.79, 1.06 ],
[422, 0, 0, 1.73, -0.43, 1.0, 100, 1, 3.45, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 5.18, 0.69, 1.04, 1.04, 1.38 ],
[429, 0, 0, 0.03, -0.01, 1.0, 100, 1, 0.05, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.08, 0.01, 0.02, 0.02, 0.02 ],
[435, 0, 0, 17.43, -4.36, 1.0, 100, 1, 34.86, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 52.3, 6.97, 10.46, 10.46, 13.95 ],
[436, 0, 0, 5.1, -1.27, 1.0, 100, 1, 10.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 15.29, 2.04, 3.06, 3.06, 4.08 ],
[440, 0, 0, 1.38, -0.35, 1.0, 100, 1, 2.77, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 4.15, 0.55, 0.83, 0.83, 1.11 ],
[441, 0, 0, 2.33, -0.58, 1.0, 100, 1, 4.67, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 7.0, 0.93, 1.4, 1.4, 1.87 ],
[492, 0, 0, 2.17, -0.54, 1.0, 100, 1, 4.34, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 6.51, 0.87, 1.3, 1.3, 1.73 ],
[493, 0, 0, 4.73, -1.18, 1.0, 100, 1, 9.46, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 14.19, 1.89, 2.84, 2.84, 3.78 ],
[496, 0, 0, 0.72, -0.18, 1.0, 100, 1, 1.44, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 2.16, 0.29, 0.43, 0.43, 0.58 ],
[497, 0, 0, 0.17, -0.04, 1.0, 100, 1, 0.35, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.52, 0.07, 0.1, 0.1, 0.14 ],
[498, 0, 0, 3.86, -0.96, 1.0, 100, 1, 7.72, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 11.58, 1.54, 2.32, 2.32, 3.09 ],
[502, 0, 0, 13.63, -3.41, 1.0, 100, 1, 27.27, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 40.9, 5.45, 8.18, 8.18, 10.91 ],
[503, 0, 0, 2.91, -0.73, 1.0, 100, 1, 5.81, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 8.72, 1.16, 1.74, 1.74, 2.32 ],
[504, 0, 0, 1.07, -0.27, 1.0, 100, 1, 2.15, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 3.22, 0.43, 0.64, 0.64, 0.86 ],
[505, 0, 0, 0.02, -0.0, 1.0, 100, 1, 0.04, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.06, 0.01, 0.01, 0.01, 0.01 ],
[511, 0, 0, 4.4, -1.1, 1.0, 100, 1, 8.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 13.19, 1.76, 2.64, 2.64, 3.52 ],
[512, 0, 0, 5.92, -1.48, 1.0, 100, 1, 11.83, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 17.75, 2.37, 3.55, 3.55, 4.73 ],
[557, 0, 0, 2.09, -0.52, 1.0, 100, 1, 4.18, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 6.28, 0.84, 1.26, 1.26, 1.67 ],
[558, 0, 0, 27.71, -6.93, 1.0, 100, 1, 55.42, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 83.12, 11.08, 16.62, 16.62, 22.17 ],
[559, 0, 0, 2.54, -0.64, 1.0, 100, 1, 5.09, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 7.63, 1.02, 1.53, 1.53, 2.03 ],
[560, 0, 0, 17.98, -4.49, 1.0, 100, 1, 35.96, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 53.94, 7.19, 10.79, 10.79, 14.38 ],
[569, 0, 0, 2.04, -0.51, 1.0, 100, 1, 4.09, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 6.13, 0.82, 1.23, 1.23, 1.64 ],
[570, 0, 0, 3.77, -0.94, 1.0, 100, 1, 7.54, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 11.31, 1.51, 2.26, 2.26, 3.02 ],
])
ppc["branch"] = array([
[8, 9, 0.00024379, 0.00243793, 0.35006327, 2395, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[492, 11, 0.0045562, 0.01822479, 0.04820045, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[11, 493, 0.00757174, 0.03028694, 0.0801021, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[492, 493, 0.01130413, 0.04521653, 0.11958747, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[17, 18, 0.00462352, 0.04623523, 0.9335989, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[17, 12, 0.0005602, 0.00560203, 0.1131183, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[20, 21, 0.00108334, 0.01083345, 0.09722357, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[20, 22, 0.00099339, 0.00993386, 0.3566014, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[497, 23, 0.0005476, 0.00219041, 0.00579315, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[25, 22, 0.00035578, 0.00355783, 0.03192931, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[8, 21, 0.00098947, 0.00989474, 0.0887992, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[31, 32, 0.00299776, 0.02997761, 0.60531903, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[32, 33, 0.00167622, 0.01676223, 0.33846928, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[37, 10, 0.00240464, 0.0240464, 0.48555384, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[10, 38, 0.00068488, 0.0068488, 0.13829351, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[37, 38, 0.00143783, 0.01437835, 1.16133176, 1796, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[39, 40, 0.00452163, 0.0452163, 0.91302431, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[39, 41, 0.0017467, 0.01746699, 0.35269996, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[42, 41, 0.00311454, 0.03114543, 0.6289001, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[18, 42, 0.00343975, 0.03439751, 0.69456727, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[492, 43, 0.00910612, 0.03642446, 0.09633445, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[492, 43, 0.00909587, 0.03638347, 0.09622603, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[44, 45, 0.00640579, 0.02562314, 0.0677674, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[44, 505, 0.00151537, 0.00606149, 0.01603126, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[46, 12, 0.00029449, 0.00294494, 0.1057163, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[46, 12, 0.00029482, 0.00294823, 0.10583438, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[47, 48, 0.00053442, 0.00534418, 0.01199019, 299, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[31, 33, 0.0013476, 0.01347599, 0.27211226, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[71, 72, 0.00088786, 0.00887864, 0.31872128, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[73, 74, 0.00125295, 0.01252955, 0.25300129, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[37, 75, 0.00274591, 0.02745914, 0.5544652, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[72, 75, 0.00066887, 0.00668871, 0.24010838, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[37, 72, 0.00362221, 0.03622207, 0.73140949, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[73, 72, 0.00254751, 0.02547507, 0.51440208, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[18, 40, 0.00130277, 0.0130277, 0.26306019, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[492, 45, 0.00771758, 0.0308703, 0.18370115, 520, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[10, 74, 0.00301674, 0.03016736, 0.60915055, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[45, 511, 0.02050843, 0.08203372, 0.05424015, 173, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[78, 32, 0.00134588, 0.0134588, 0.48313778, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[79, 80, 0.00076233, 0.00762327, 0.06841417, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[79, 80, 0.00076174, 0.00761738, 0.06836134, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[81, 79, 0.00215305, 0.02153047, 0.19322279, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[81, 79, 0.00215357, 0.02153566, 0.1932694, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[42, 98, 0.00061861, 0.00618611, 0.22206638, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[42, 98, 0.00061835, 0.00618352, 0.22197315, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[42, 101, 0.00081653, 0.00816534, 0.29311568, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[102, 42, 0.0012403, 0.01240305, 0.44523901, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[37, 39, 0.00065102, 0.00651021, 0.23370076, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[177, 496, 0.00932496, 0.03729983, 0.09864961, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[177, 496, 0.00931603, 0.03726413, 0.09855518, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[179, 493, 0.01426992, 0.05707967, 0.15096279, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[180, 181, 0.01025686, 0.04102744, 0.10850827, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[182, 180, 0.00433818, 0.01735273, 0.04589403, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[179, 181, 0.00489306, 0.01957223, 0.05176412, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[180, 493, 0.0166914, 0.06676562, 0.17657993, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[183, 30, 0.00049645, 0.00496451, 0.17821369, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[183, 21, 0.00025687, 0.00256873, 0.36884485, 2395, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[183, 21, 0.00051295, 0.0051295, 0.18413654, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[183, 30, 0.00049609, 0.00496087, 0.17808317, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[504, 192, 0.00015355, 0.00061421, 0.00162446, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[504, 192, 0.00015421, 0.00061686, 0.00163145, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[217, 98, 0.00012787, 0.00127874, 0.04590362, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[504, 218, 0.00687025, 0.02748099, 0.07268099, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[177, 504, 0.01763702, 0.0705481, 0.18658373, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[37, 39, 0.00086777, 0.00867775, 0.1752243, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[20, 22, 0.00099413, 0.00994131, 0.35686864, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[20, 21, 0.00108314, 0.01083137, 0.09720492, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[503, 276, 0.00335322, 0.01341289, 0.03547406, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[503, 276, 0.00335372, 0.01341488, 0.03547931, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[503, 504, 0.03215471, 0.12861884, 0.34016769, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[503, 504, 0.03216364, 0.12865455, 0.34026211, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[177, 218, 0.01082595, 0.0433038, 0.11452874, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[182, 180, 0.00433157, 0.01732628, 0.04582409, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[277, 278, 0.00143837, 0.01438366, 0.51633804, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[277, 278, 0.00143823, 0.01438227, 0.51628832, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[557, 558, 0.01085322, 0.04341289, 0.25833884, 520, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[557, 559, 0.00853967, 0.03415868, 0.09034196, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[559, 558, 0.01118579, 0.04474314, 0.11833547, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[277, 78, 0.00358577, 0.03585769, 0.32180078, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[277, 279, 0.00213909, 0.02139093, 0.19197048, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[78, 279, 0.0015812, 0.01581198, 0.14190284, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[8, 102, 0.00151001, 0.01510007, 0.5420555, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[8, 101, 0.00192469, 0.01924688, 0.69091598, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[80, 288, 0.00159713, 0.01597126, 0.14333228, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[80, 288, 0.00159681, 0.01596814, 0.14330432, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[80, 289, 0.00013825, 0.0013825, 0.027916, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[80, 289, 0.00014241, 0.00142405, 0.02875502, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[80, 289, 0.00015471, 0.00154709, 0.03123945, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[80, 289, 0.00015129, 0.00151293, 0.03054959, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[276, 560, 0.00889322, 0.03557289, 0.02352056, 173, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[276, 560, 0.00889157, 0.03556628, 0.02351619, 173, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[37, 290, 0.00112768, 0.01127678, 0.22770489, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[290, 74, 0.00414344, 0.04143444, 0.83665966, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[290, 74, 0.00414319, 0.0414319, 0.83660839, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[37, 290, 0.0011259, 0.011259, 0.22734598, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[512, 291, 0.00265967, 0.01063868, 0.02813689, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[512, 291, 0.00266496, 0.01065983, 0.02819285, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[78, 292, 0.0011638, 0.01163804, 0.23499969, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[78, 292, 0.001163, 0.01162996, 0.23483654, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[71, 74, 0.00390452, 0.03904524, 0.78841612, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[74, 278, 0.00154224, 0.01542244, 0.55362774, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[74, 278, 0.00154245, 0.01542452, 0.55370232, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[32, 292, 0.00096794, 0.00967936, 0.34746542, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[503, 560, 0.00378512, 0.0151405, 0.16017272, 693, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[311, 280, 0.00034337, 0.00343369, 0.1232611, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[280, 278, 0.00097498, 0.00974977, 0.78748387, 1796, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[311, 32, 0.00241133, 0.02411334, 0.48690559, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[557, 321, 0.00500298, 0.0200119, 0.05292694, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[277, 65, 0.00188585, 0.01885849, 0.38079775, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[322, 288, 0.001309, 0.01309003, 0.26431871, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[322, 323, 0.00035076, 0.00350762, 0.07082712, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[322, 323, 0.00037006, 0.0037006, 0.0747239, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[277, 324, 0.00197195, 0.01971953, 0.39818407, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[324, 325, 0.00110351, 0.01103509, 0.2228246, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[277, 325, 0.00086657, 0.00866574, 0.17498191, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[332, 78, 0.00129444, 0.01294437, 0.26137749, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[322, 288, 0.001309, 0.01309003, 0.26431871, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[324, 288, 0.00126274, 0.01262742, 0.1133234, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[338, 559, 0.00230702, 0.0092281, 0.09762492, 693, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[339, 559, 0.00890149, 0.03560595, 0.02354242, 173, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[339, 340, 0.02177884, 0.08711537, 0.23040041, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[559, 340, 0.05245818, 0.20983273, 0.13874, 173, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[341, 292, 9.329e-05, 0.00093294, 0.07535316, 1796, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[338, 559, 0.00461405, 0.0184562, 0.04881246, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[557, 342, 0.00302595, 0.0121038, 0.03201181, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[558, 343, 0.00266256, 0.01065025, 0.11266997, 693, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[502, 340, 0.01086926, 0.04347702, 0.11498688, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[502, 340, 0.01086876, 0.04347504, 0.11498163, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[72, 32, 0.00135107, 0.01351073, 0.48500226, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[72, 32, 0.001351, 0.01351004, 0.4849774, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[344, 345, 5.763e-05, 0.00057629, 0.04654687, 1796, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[346, 47, 0.0001134, 0.001134, 0.04070792, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[46, 47, 8.975e-05, 0.00089751, 0.0322183, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[346, 345, 7.218e-05, 0.00072178, 0.02591013, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[363, 344, 2.663e-05, 0.00026627, 0.00955859, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[332, 78, 0.00129421, 0.01294206, 0.26133088, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[37, 49, 0.0016876, 0.01687604, 0.15145211, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[37, 49, 0.0016883, 0.01688296, 0.15151426, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[81, 74, 0.00150357, 0.01503566, 0.13493589, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[81, 74, 0.00150416, 0.01504155, 0.13498871, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[278, 80, 0.00325679, 0.03256787, 0.29227666, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[278, 80, 0.0032572, 0.03257202, 0.29231395, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[81, 278, 0.00421184, 0.04211842, 0.37798704, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[81, 278, 0.0042108, 0.04210803, 0.37789381, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[569, 570, 0.00813488, 0.0325395, 0.08605961, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[498, 400, 0.00303355, 0.01213421, 0.03209225, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[557, 342, 0.00300992, 0.01203967, 0.0318422, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[557, 321, 0.00500231, 0.02000926, 0.05291995, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[277, 65, 0.00188603, 0.01886034, 0.38083504, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[8, 21, 0.00098975, 0.00989751, 0.08882406, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[311, 32, 0.00241182, 0.02411819, 0.48700348, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[422, 81, 0.0002536, 0.00253601, 0.02275915, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[422, 81, 0.00023449, 0.00234488, 0.02104382, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[422, 81, 0.00023272, 0.00232722, 0.02088534, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[422, 81, 0.00023518, 0.0023518, 0.02110597, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[422, 81, 0.00023269, 0.00232687, 0.02088223, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[422, 81, 0.0002536, 0.00253601, 0.02275915, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[45, 429, 0.00640579, 0.02562314, 0.0677674, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[44, 429, 1.322e-05, 5.289e-05, 0.00013989, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[505, 429, 0.00150314, 0.00601256, 0.01590186, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[32, 436, 0.00044813, 0.0044813, 0.16086776, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[435, 436, 6.634e-05, 0.00066343, 0.02381569, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[78, 436, 0.00089768, 0.0089768, 0.32224515, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[181, 441, 0.01020132, 0.04080529, 0.10792074, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[440, 441, 3.306e-05, 0.00013223, 0.00034972, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[504, 441, 0.01479025, 0.05916099, 0.15646741, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[10, 492, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[12, 493, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[18, 496, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[20, 497, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[22, 498, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[32, 502, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[37, 503, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[42, 504, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[46, 505, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[74, 511, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[78, 512, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[277, 557, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[279, 558, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[280, 559, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[290, 560, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[332, 569, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ]
])
ppc["gencost"] = array([
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 28.0, 0, 42.0, 21.0, 33.6, 16.8 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 100.0, 0, 150.0, 75.0, 120.0, 60.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 25.0, 0, 37.5, 18.75, 30.0, 15.0 ],
[2, 0, 0, 3, 0, 100.0, 0, 150.0, 75.0, 120.0, 60.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 25.0, 0, 37.5, 18.75, 30.0, 15.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 25.0, 0, 37.5, 18.75, 30.0, 15.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 100.0, 0, 150.0, 75.0, 120.0, 60.0 ],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 100.0, 0, 150.0, 75.0, 120.0, 60.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 3.0, 0, 4.5, 2.25, 3.5999999999999996,1.7999999999999998],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 3.0, 0, 4.5, 2.25, 3.5999999999999996,1.7999999999999998],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 100.0, 0, 150.0, 75.0, 120.0, 60.0 ],
[2, 0, 0, 3, 0, 32.0, 0, 48.0, 24.0, 38.4, 19.2 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 25.0, 0, 37.5, 18.75, 30.0, 15.0 ],
[2, 0, 0, 3, 0, 100.0, 0, 150.0, 75.0, 120.0, 60.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 25.0, 0, 37.5, 18.75, 30.0, 15.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 8.0, 0, 12.0, 6.0, 9.6, 4.8 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 3.0, 0, 4.5, 2.25, 3.5999999999999996,1.7999999999999998],
[2, 0, 0, 3, 0, 100.0, 0, 150.0, 75.0, 120.0, 60.0 ],
[2, 0, 0, 3, 0, 3.0, 0, 4.5, 2.25, 3.5999999999999996,1.7999999999999998],
[2, 0, 0, 3, 0, 3.0, 0, 4.5, 2.25, 3.5999999999999996,1.7999999999999998],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 25.0, 0, 37.5, 18.75, 30.0, 15.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 26.0, 0, 39.0, 19.5, 31.2, 15.6 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 3.0, 0, 4.5, 2.25, 3.5999999999999996,1.7999999999999998],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 25.0, 0, 37.5, 18.75, 30.0, 15.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 3.0, 0, 4.5, 2.25, 3.5999999999999996,1.7999999999999998],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 100.0, 0, 150.0, 75.0, 120.0, 60.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 32.0, 0, 48.0, 24.0, 38.4, 19.2 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
])
return ppc
| 126.029021
| 192
| 0.304732
| 20,130
| 104,226
| 1.577695
| 0.045753
| 0.352656
| 0.379451
| 0.419912
| 0.603797
| 0.546869
| 0.542744
| 0.527315
| 0.52508
| 0.52508
| 0
| 0.585623
| 0.481857
| 104,226
| 827
| 193
| 126.029021
| 0.002463
| 0
| 0
| 0.326481
| 0
| 0
| 0.000326
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.001209
| false
| 0
| 0.001209
| 0
| 0.003628
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
14401828cb03b500a3a3d768a9a177cc1604e3a6
| 2,963
|
py
|
Python
|
conv_shapes/__init__.py
|
jacobhepkema/conv_shapes
|
774b9a4580ad04022f5e871bc097c7054b95212f
|
[
"MIT"
] | null | null | null |
conv_shapes/__init__.py
|
jacobhepkema/conv_shapes
|
774b9a4580ad04022f5e871bc097c7054b95212f
|
[
"MIT"
] | null | null | null |
conv_shapes/__init__.py
|
jacobhepkema/conv_shapes
|
774b9a4580ad04022f5e871bc097c7054b95212f
|
[
"MIT"
] | null | null | null |
import math
def brute_force_output_shape_conv(input_size: int,
                                  output_size: int,
                                  max_stride: int = 10,
                                  max_kernel: int = 10,
                                  max_padding: int = 10,
                                  set_stride = None,
                                  set_kernel = None,
                                  set_padding = None) -> None:
    r"""
    Brute-force search for (stride, kernel, padding) combinations of a
    regular convolution mapping ``input_size`` to ``output_size``.

    Prints one line per solution, formatted ``s: <stride>, k: <kernel>,
    p: <padding>``. Fix a single hyper-parameter with ``set_stride``,
    ``set_kernel`` or ``set_padding``; otherwise strides and kernels are
    searched over ``range(1, max_*)`` and paddings over
    ``range(0, max_padding)`` (upper bounds exclusive, per ``range``).
    If you have a multidimensional case, run it for each dimension
    separately; it generalises.
    Formula from: https://arxiv.org/pdf/1603.07285.pdf
    """
    assert set_stride != 0, "Stride cannot be 0"
    assert set_kernel != 0, "Kernel size cannot be 0"
    # `is not None` (identity), not `!= None`: 0 is handled by the asserts above.
    stride_range = [set_stride] if set_stride is not None else range(1, max_stride)
    kernel_range = [set_kernel] if set_kernel is not None else range(1, max_kernel)
    padding_range = [set_padding] if set_padding is not None else range(0, max_padding)
    for stride in stride_range:
        for kernel in kernel_range:
            for padding in padding_range:
                # o = floor((i - k + 2p) / s) + 1. Integer floor division
                # replaces math.floor(float / s): same result for integer
                # arguments, with no float round-off.
                if (input_size - kernel + 2 * padding) // stride + 1 == output_size:
                    print(f"s: {stride}, k: {kernel}, p: {padding}")
def brute_force_output_shape_transposed_conv(input_size: int,
                                             output_size: int,
                                             max_stride: int = 10,
                                             max_kernel: int = 10,
                                             max_padding: int = 10,
                                             set_stride = None,
                                             set_kernel = None,
                                             set_padding = None) -> None:
    r"""
    Brute-force search for (stride, kernel, padding) combinations of a
    transposed convolution mapping ``input_size`` to ``output_size``.

    Prints one line per solution, formatted ``s: <stride>, k: <kernel>,
    p: <padding>``. Fix a single hyper-parameter with ``set_stride``,
    ``set_kernel`` or ``set_padding``; otherwise strides and kernels are
    searched over ``range(1, max_*)`` and paddings over
    ``range(0, max_padding)`` (upper bounds exclusive, per ``range``).
    If you have a multidimensional case, run it for each dimension
    separately; it generalises.
    Formula from: https://arxiv.org/pdf/1603.07285.pdf
    """
    assert set_stride != 0, "Stride cannot be 0"
    assert set_kernel != 0, "Kernel size cannot be 0"
    # `is not None` (identity), not `!= None`: 0 is handled by the asserts above.
    stride_range = [set_stride] if set_stride is not None else range(1, max_stride)
    kernel_range = [set_kernel] if set_kernel is not None else range(1, max_kernel)
    padding_range = [set_padding] if set_padding is not None else range(0, max_padding)
    for stride in stride_range:
        for kernel in kernel_range:
            for padding in padding_range:
                # o = s * (i - 1) + k - 2p  (transposed-conv output size)
                if stride * (input_size - 1) + kernel - (2 * padding) == output_size:
                    print(f"s: {stride}, k: {kernel}, p: {padding}")
| 49.383333
| 83
| 0.538306
| 361
| 2,963
| 4.227147
| 0.199446
| 0.058978
| 0.051114
| 0.036697
| 0.951507
| 0.920052
| 0.920052
| 0.920052
| 0.920052
| 0.920052
| 0
| 0.026087
| 0.379008
| 2,963
| 59
| 84
| 50.220339
| 0.803261
| 0.236247
| 0
| 0.820513
| 0
| 0
| 0.049451
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 1
| 0.051282
| false
| 0
| 0.025641
| 0
| 0.076923
| 0.051282
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1454cb1f6c7f069927b7bd3f677b499bc9e58f3a
| 11,719
|
py
|
Python
|
backend/app/linemessage.py
|
CE63-26-Prawny/prawny
|
ac1f394204ece3078303c973af0155985ed65dc7
|
[
"MIT"
] | null | null | null |
backend/app/linemessage.py
|
CE63-26-Prawny/prawny
|
ac1f394204ece3078303c973af0155985ed65dc7
|
[
"MIT"
] | null | null | null |
backend/app/linemessage.py
|
CE63-26-Prawny/prawny
|
ac1f394204ece3078303c973af0155985ed65dc7
|
[
"MIT"
] | null | null | null |
import json
import os
#linebot
from linebot import LineBotApi
from linebot.models import TextSendMessage, FlexSendMessage
#db
import psycopg2
from . import crud
from datetime import datetime
#global variable
line_bot_api = LineBotApi(os.environ.get('PRAWNY_LINE_CHANNEL_ACCESS_TOKEN'))
def pushMessage(owner_id: str, name_device: str, device_id: str, condition: str, value: float, sensor: str, msg: str):
    """Push a LINE flex-message alert when the latest reading crosses a threshold.

    Fetches the most recent row for ``device_id`` from the ``data`` table,
    selects the column for ``sensor`` ("temp" -> 2, "ph" -> 3, "doxy" -> 4,
    anything else -> 5), and pushes a warning bubble to ``owner_id`` when
    the reading is above ``value`` (``condition == 'max'``) or below it
    (``condition == 'min'``). Any other ``condition`` sends nothing.

    :param owner_id: LINE user id the message is pushed to.
    :param name_device: human-readable device name, shown as bubble title.
    :param device_id: device whose latest reading is checked.
    :param condition: 'max' or 'min' threshold direction.
    :param value: threshold the reading is compared against.
    :param sensor: sensor key selecting the data column (see above).
    :param msg: free text shown in the bubble footer.
    """
    # Dispatch table replaces the if/elif chain; unknown sensors fall back
    # to column 5 exactly as the original `else` branch did.
    indexSensor = {"temp": 2, "ph": 3, "doxy": 4}.get(sensor, 5)
    conn = psycopg2.connect(
        host=os.environ.get('PRAWNY_DB_HOST'),
        port=os.environ.get('PRAWNY_DB_PORT'),
        database=os.environ.get('PRAWNY_DB_NAME'),
        user=os.environ.get('PRAWNY_DB_USER'),
        password=os.environ.get('PRAWNY_DB_PW')
    )
    cur = conn.cursor()
    # Parameterized query: device_id is caller-supplied, so interpolating it
    # with an f-string was a SQL-injection vector.
    cur.execute("SELECT * FROM data WHERE device_id = %s ORDER BY time DESC LIMIT 2",
                (device_id,))
    data = cur.fetchone()[indexSensor]
    conn.commit()
    cur.close()
    conn.close()
    msg = '''{
    "type":"bubble",
    "body":{
        "type":"box",
        "layout":"vertical",
        "spacing":"md",
        "contents":[
            {
                "type":"text",
                "text":"%s",
                "weight":"bold",
                "size":"xl",
                "contents":[
                ]
            },
            {
                "type":"box",
                "layout":"vertical",
                "spacing":"sm",
                "contents":[
                    {
                        "type":"box",
                        "layout":"baseline",
                        "contents":[
                            {
                                "type":"text",
                                "text":"Sensor",
                                "weight":"bold",
                                "margin":"sm",
                                "contents":[
                                ]
                            },
                            {
                                "type":"text",
                                "text":"Value",
                                "weight":"bold",
                                "color":"#000000FF",
                                "align":"end",
                                "contents":[
                                ]
                            }
                        ]
                    },
                    {
                        "type":"box",
                        "layout":"baseline",
                        "contents":[
                            {
                                "type":"text",
                                "text":"%s",
                                "weight":"regular",
                                "margin":"sm",
                                "contents":[
                                ]
                            },
                            {
                                "type":"text",
                                "text":"%s",
                                "color":"#000000FF",
                                "align":"end",
                                "contents":[
                                ]
                            }
                        ]
                    }
                ]
            }
        ]
    },
    "footer":{
        "type":"box",
        "layout":"vertical",
        "contents":[
            {
                "type":"separator",
                "margin":"md",
                "color":"#000000FF"
            },
            {
                "type":"text",
                "text":"%s",
                "weight":"regular",
                "align":"start",
                "margin":"md",
                "contents":[
                ]
            }
        ]
    }
}''' % (name_device, sensor, str(data), msg)
    # Single push path: both branches of the original did exactly the same
    # thing, only the comparison direction differed.
    should_alert = ((condition == 'max' and data > value)
                    or (condition == 'min' and data < value))
    if should_alert:
        res = json.loads(msg)
        line_bot_api.push_message(owner_id, FlexSendMessage(alt_text="warning", contents=res))
def pushMessagebyTime(owner_id: str, id: str, device_id: str, msg: str):
    """Push a LINE flex-message summary of all latest sensor readings.

    Fetches the most recent row for ``device_id`` from the ``data`` table
    and pushes a bubble listing Temperature, pH, Dissolved Oxygen and
    Electrical Conductivity (row columns 2-5) to ``owner_id``, with ``msg``
    in the footer.

    :param owner_id: LINE user id the message is pushed to.
    :param id: not used by this function; kept for caller compatibility.
    :param device_id: device whose latest row is reported (also the title).
    :param msg: free text shown in the bubble footer.
    """
    conn = psycopg2.connect(
        host=os.environ.get('PRAWNY_DB_HOST'),
        port=os.environ.get('PRAWNY_DB_PORT'),
        database=os.environ.get('PRAWNY_DB_NAME'),
        user=os.environ.get('PRAWNY_DB_USER'),
        password=os.environ.get('PRAWNY_DB_PW')
    )
    cur = conn.cursor()
    # Parameterized query: device_id is caller-supplied, so interpolating it
    # with an f-string was a SQL-injection vector.
    cur.execute("SELECT * FROM data WHERE device_id = %s ORDER BY time DESC LIMIT 2",
                (device_id,))
    data = cur.fetchone()
    conn.commit()
    cur.close()
    conn.close()
    # Bug fix: the original format tuple passed an undefined name `sensor`
    # as its second value, which raised NameError on every call (and would
    # have supplied 7 values for the template's 6 %s placeholders anyway).
    msg = '''
    {
    "type":"bubble",
    "size":"giga",
    "direction":"ltr",
    "body":{
        "type":"box",
        "layout":"vertical",
        "spacing":"md",
        "contents":[
            {
                "type":"text",
                "text":"%s",
                "weight":"bold",
                "size":"xl",
                "contents":[
                ]
            },
            {
                "type":"box",
                "layout":"vertical",
                "spacing":"sm",
                "contents":[
                    {
                        "type":"box",
                        "layout":"baseline",
                        "contents":[
                            {
                                "type":"text",
                                "text":"Sensor",
                                "weight":"bold",
                                "margin":"sm",
                                "contents":[
                                ]
                            },
                            {
                                "type":"text",
                                "text":"Value",
                                "weight":"bold",
                                "color":"#000000FF",
                                "align":"end",
                                "contents":[
                                ]
                            }
                        ]
                    },
                    {
                        "type":"box",
                        "layout":"baseline",
                        "contents":[
                            {
                                "type":"text",
                                "text":"Temperature",
                                "weight":"regular",
                                "margin":"sm",
                                "contents":[
                                ]
                            },
                            {
                                "type":"text",
                                "text":"%s",
                                "color":"#000000FF",
                                "align":"end",
                                "contents":[
                                ]
                            }
                        ]
                    },
                    {
                        "type":"box",
                        "layout":"baseline",
                        "contents":[
                            {
                                "type":"text",
                                "text":"pH",
                                "weight":"regular",
                                "margin":"sm",
                                "contents":[
                                ]
                            },
                            {
                                "type":"text",
                                "text":"%s",
                                "color":"#000000FF",
                                "align":"end",
                                "contents":[
                                ]
                            }
                        ]
                    },
                    {
                        "type":"box",
                        "layout":"baseline",
                        "contents":[
                            {
                                "type":"text",
                                "text":"Dissolved Oxygen",
                                "weight":"regular",
                                "margin":"sm",
                                "contents":[
                                ]
                            },
                            {
                                "type":"text",
                                "text":"%s",
                                "color":"#000000FF",
                                "align":"end",
                                "contents":[
                                ]
                            }
                        ]
                    },
                    {
                        "type":"box",
                        "layout":"baseline",
                        "contents":[
                            {
                                "type":"text",
                                "text":"Electrical Conductivity",
                                "weight":"regular",
                                "margin":"sm",
                                "contents":[
                                ]
                            },
                            {
                                "type":"text",
                                "text":"%s",
                                "color":"#000000FF",
                                "align":"end",
                                "contents":[
                                ]
                            }
                        ]
                    }
                ]
            }
        ]
    },
    "footer":{
        "type":"box",
        "layout":"vertical",
        "contents":[
            {
                "type":"separator",
                "margin":"md",
                "color":"#000000FF"
            },
            {
                "type":"text",
                "text":"%s",
                "weight":"regular",
                "size":"lg",
                "align":"start",
                "margin":"md",
                "contents":[
                ]
            }
        ]
    }
}''' % (device_id, str(data[2]), str(data[3]), str(data[4]), str(data[5]), msg)
    res = json.loads(msg)
    line_bot_api.push_message(owner_id, FlexSendMessage(alt_text="warning", contents=res))
def test(device_id: str, ownder_id: str, msg: str):
    """Debug helper: fetch the latest row for a device and push a text message.

    Prints the fetched row, then pushes ``message: <msg>, value=<row[3]>``
    to the given LINE user.

    :param device_id: device whose latest row is fetched.
    :param ownder_id: LINE user id to push to. NOTE(review): name is a typo
        for ``owner_id`` but is kept to preserve the call interface.
    :param msg: text included in the pushed message.
    """
    conn = psycopg2.connect(
        host=os.environ.get('PRAWNY_DB_HOST'),
        port=os.environ.get('PRAWNY_DB_PORT'),
        database=os.environ.get('PRAWNY_DB_NAME'),
        user=os.environ.get('PRAWNY_DB_USER'),
        password=os.environ.get('PRAWNY_DB_PW')
    )
    cur = conn.cursor()
    # Parameterized query: device_id is caller-supplied, so interpolating it
    # with an f-string was a SQL-injection vector.
    cur.execute("SELECT * FROM data WHERE device_id = %s ORDER BY time DESC LIMIT 2",
                (device_id,))
    value = cur.fetchone()
    conn.commit()
    cur.close()
    conn.close()
    print(value)
    line_bot_api.push_message(ownder_id, TextSendMessage(text=f'message: {msg}, value={str(value[3])}'))
| 33.387464
| 117
| 0.282277
| 678
| 11,719
| 4.775811
| 0.174041
| 0.100062
| 0.066708
| 0.088944
| 0.771464
| 0.748919
| 0.74861
| 0.74861
| 0.736875
| 0.712168
| 0
| 0.015122
| 0.599369
| 11,719
| 351
| 118
| 33.387464
| 0.674547
| 0.002133
| 0
| 0.573209
| 0
| 0
| 0.797297
| 0.006329
| 0
| 0
| 0
| 0
| 0
| 1
| 0.009346
| false
| 0.009346
| 0.021807
| 0
| 0.031153
| 0.003115
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1470f2b52467f1252a99ded25394130bd5ecd107
| 1,306
|
py
|
Python
|
Packages/Patterns_Package/symbols/filled_symbols/Opposite_pattern_of_Two_Normal_and_Reverse_Isosceles_triangles.py
|
saribalarakeshreddy/Python-3.9.0
|
25b4c74feb2a27b91e69aa82becde23e356e82c4
|
[
"MIT"
] | null | null | null |
Packages/Patterns_Package/symbols/filled_symbols/Opposite_pattern_of_Two_Normal_and_Reverse_Isosceles_triangles.py
|
saribalarakeshreddy/Python-3.9.0
|
25b4c74feb2a27b91e69aa82becde23e356e82c4
|
[
"MIT"
] | null | null | null |
Packages/Patterns_Package/symbols/filled_symbols/Opposite_pattern_of_Two_Normal_and_Reverse_Isosceles_triangles.py
|
saribalarakeshreddy/Python-3.9.0
|
25b4c74feb2a27b91e69aa82becde23e356e82c4
|
[
"MIT"
] | null | null | null |
def for_Opposite_pattern_of_Two_Normal_and_Reverse_Isosceles_triangles():
    """Print a point-down isosceles triangle followed by a point-up one
    (4 rows x 7 columns each), using nested for loops."""
    # Upper half: inverted triangle, each row described explicitly.
    for row in range(4):
        cells = []
        for col in range(7):
            starred = (row == 0 or col == 3
                       or (row == 1 and col % 6 != 0)
                       or (row == 2 and col in (2, 4)))
            cells.append('* ' if starred else '  ')
        print(''.join(cells))
    # Lower half: upright triangle as the intersection of two half-planes.
    for row in range(4):
        cells = ['* ' if row + col >= 3 and col - row <= 3 else '  '
                 for col in range(7)]
        print(''.join(cells))
def while_Opposite_pattern_of_Two_Normal_and_Reverse_Isosceles_triangles():
    """Print a point-down isosceles triangle followed by a point-up one
    (4 rows x 7 columns each), using while loops."""
    # Upper half: inverted triangle, each row described explicitly.
    row = 0
    while row < 4:
        pieces = []
        col = 0
        while col < 7:
            starred = (row == 0 or col == 3
                       or (row == 1 and col % 6 != 0)
                       or (row == 2 and col in (2, 4)))
            pieces.append('* ' if starred else '  ')
            col += 1
        print(''.join(pieces))
        row += 1
    # Lower half: upright triangle as the intersection of two half-planes.
    row = 0
    while row < 4:
        pieces = []
        col = 0
        while col < 7:
            pieces.append('* ' if row + col >= 3 and col - row <= 3 else '  ')
            col += 1
        print(''.join(pieces))
        row += 1
| 30.372093
| 104
| 0.447167
| 180
| 1,306
| 3.055556
| 0.166667
| 0.116364
| 0.123636
| 0.145455
| 0.950909
| 0.950909
| 0.932727
| 0.932727
| 0.932727
| 0.932727
| 0
| 0.047368
| 0.41807
| 1,306
| 42
| 105
| 31.095238
| 0.676316
| 0.140123
| 0
| 0.947368
| 0
| 0
| 0.014981
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.052632
| false
| 0
| 0
| 0
| 0.052632
| 0.315789
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
14872390216ddf732c2614ae146f4fe8343e10f0
| 34,783
|
py
|
Python
|
tests/test_executor.py
|
tidalwave-it/solidblue3-p-src
|
7cd93af3f45c74469cbca4b93fb03728fd2c2338
|
[
"Apache-2.0"
] | null | null | null |
tests/test_executor.py
|
tidalwave-it/solidblue3-p-src
|
7cd93af3f45c74469cbca4b93fb03728fd2c2338
|
[
"Apache-2.0"
] | null | null | null |
tests/test_executor.py
|
tidalwave-it/solidblue3-p-src
|
7cd93af3f45c74469cbca4b93fb03728fd2c2338
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# SolidBlue III - Open source data manager.
#
# __author__ = "Fabrizio Giudici"
# __copyright__ = "Copyright © 2020 by Fabrizio Giudici"
# __credits__ = ["Fabrizio Giudici"]
# __license__ = "Apache v2"
# __version__ = "1.0-ALPHA-4-SNAPSHOT"
# __maintainer__ = "Fabrizio Giudici"
# __email__ = "fabrizio.giudici@tidalwave.it"
# __status__ = "Prototype"
import os
import unittest
from pathlib import Path
from executor import Executor
class TestExecutor(unittest.TestCase):
#
#
#
def test_special_readline(self):
actual = []
with open(self.__test_resource('drutil/drutil.log'), 'rb') as file:
while True:
string = Executor.special_read_line(file, 'utf-8')
if string == '':
break
actual += [string]
expected = ['Burning Image to Disc: /tmp/SolidBlue/FG-2019-0001,0002 #2.dmg\n', '\n',
'Please insert blank or appendable media in (null) CDDVDW SE-208DB.\n', 'Preparing... done.\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', 'Writing track 1 ... [ ] 1% \r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '* ] 1% \r', ' \r', '\r', ' \r', '\r', ' \r',
'2% \r', ' \r', '\r', ' \r', '\r', ' \r', '3% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '* ] 3% \r',
' \r', '\r', ' \r', '\r', ' \r', '4% \r', ' \r', '\r', ' \r', '\r', ' \r', '5% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'* ] 5% \r', ' \r', '\r', ' \r', '\r', ' \r', '6% \r', ' \r', '\r', ' \r', '\r', ' \r',
'7% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '* ] 7% \r', ' \r', '\r', ' \r', '\r', ' \r', '8% \r', ' \r', '\r', ' \r', '\r', ' \r',
'9% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'* ] 9% \r', ' \r', '\r', ' \r', '\r', ' \r', '10% \r', ' \r', '\r', ' \r', '\r', ' \r',
'1% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'* ] 11% \r', ' \r', '\r', ' \r', '\r', ' \r', '2% \r', ' \r', '\r', ' \r', '\r', ' \r',
'3% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'* ] 13% \r', ' \r', '\r', ' \r', '\r', ' \r', '4% \r', ' \r', '\r', ' \r', '\r', ' \r', '5% \r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'* ] 15% \r', ' \r', '\r', ' \r', '\r', ' \r', '6% \r', ' \r', '\r', ' \r', '\r', ' \r', '7% \r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '* ] 17% \r',
' \r', '\r', ' \r', '\r', ' \r', '8% \r', ' \r', '\r', ' \r', '\r', ' \r', '9% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '* ] 19% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '20% \r', ' \r',
'\r', ' \r', '\r', ' \r', '1% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'* ] 21% \r', ' \r', '\r', ' \r', '\r', ' \r', '2% \r', ' \r', '\r', ' \r', '\r', ' \r', '3% \r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '* ] 23% \r', ' \r', '\r', ' \r', '\r', ' \r', '4% \r',
' \r', '\r', ' \r', '\r', ' \r', '5% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '* ] 25% \r', ' \r', '\r',
' \r', '\r', ' \r', '6% \r', ' \r', '\r', ' \r', '\r', ' \r', '7% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '* ] 27% \r',
' \r', '\r', ' \r', '\r', ' \r', '8% \r', ' \r', '\r', ' \r', '\r', ' \r', '9% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '* ] 29% \r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '30% \r', ' \r', '\r', ' \r', '\r', ' \r', '1% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '* ] 31% \r',
' \r', '\r', ' \r', '\r', ' \r', '2% \r', ' \r', '\r', ' \r', '\r', ' \r', '3% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '* ] 33% \r', ' \r', '\r', ' \r', '\r',
' \r', '4% \r', ' \r', '\r', ' \r', '\r', ' \r', '5% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '* ] 35% \r', ' \r', '\r', ' \r', '\r', ' \r', '6% \r', ' \r', '\r', ' \r', '\r',
' \r', '7% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'* ] 37% \r', ' \r', '\r', ' \r', '\r', ' \r', '8% \r', ' \r', '\r', ' \r', '\r', ' \r', '9% \r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '* ] 39% \r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '40% \r', ' \r', '\r', ' \r', '\r', ' \r', '1% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '* ] 41% \r', ' \r', '\r', ' \r', '\r', ' \r', '2% \r', ' \r', '\r', ' \r', '\r', ' \r', '3% \r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '* ] 43% \r', ' \r', '\r', ' \r', '\r', ' \r', '4% \r', ' \r',
'\r', ' \r', '\r', ' \r', '5% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '* ] 45% \r', ' \r', '\r', ' \r', '\r',
' \r', '6% \r', ' \r', '\r', ' \r', '\r', ' \r', '7% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '* ] 47% \r', ' \r', '\r',
' \r', '\r', ' \r', '8% \r', ' \r', '\r', ' \r', '\r', ' \r', '9% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '* ] 49% \r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '50% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '* ] 50% \r', ' \r', '\r', ' \r', '\r', ' \r', '1% \r', ' \r', '\r', ' \r',
'\r', ' \r', '2% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '* ] 52% \r', ' \r', '\r', ' \r', '\r', ' \r', '3% \r', ' \r', '\r', ' \r', '\r', ' \r', '4% \r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '* ] 54% \r', ' \r',
'\r', ' \r', '\r', ' \r', '5% \r', ' \r', '\r', ' \r', '\r', ' \r', '6% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '* ] 56% \r', ' \r', '\r', ' \r', '\r', ' \r', '7% \r', ' \r', '\r', ' \r',
'\r', ' \r', '8% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '* ] 58% \r',
' \r', '\r', ' \r', '\r', ' \r', '9% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '60% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '* ] 60% \r', ' \r', '\r', ' \r', '\r', ' \r', '1% \r', ' \r', '\r', ' \r', '\r', ' \r',
'2% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '* ] 62% \r', ' \r', '\r', ' \r', '\r', ' \r', '3% \r',
' \r', '\r', ' \r', '\r', ' \r', '4% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '* ] 64% \r', ' \r', '\r', ' \r',
'\r', ' \r', '5% \r', ' \r', '\r', ' \r', '\r', ' \r', '6% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '* ] 66% \r', ' \r', '\r',
' \r', '\r', ' \r', '7% \r', ' \r', '\r', ' \r', '\r', ' \r', '8% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '* ] 68% \r', ' \r', '\r', ' \r',
'\r', ' \r', '9% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '70% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '* ] 70% \r', ' \r', '\r', ' \r', '\r',
' \r', '1% \r', ' \r', '\r', ' \r', '\r', ' \r', '2% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '* ] 72% \r', ' \r', '\r', ' \r', '\r', ' \r', '3% \r', ' \r', '\r', ' \r',
'\r', ' \r', '4% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'* ] 74% \r', ' \r', '\r', ' \r', '\r', ' \r', '5% \r', ' \r', '\r', ' \r', '\r', ' \r', '6% \r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '* ] 76% \r', ' \r', '\r', ' \r', '\r', ' \r',
'7% \r', ' \r', '\r', ' \r', '\r', ' \r', '8% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '* ] 78% \r', ' \r', '\r', ' \r', '\r', ' \r', '9% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '80% \r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '* ] 80% \r', ' \r', '\r', ' \r', '\r', ' \r', '1% \r',
' \r', '\r', ' \r', '\r', ' \r', '2% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '* ] 82% \r', ' \r', '\r',
' \r', '\r', ' \r', '3% \r', ' \r', '\r', ' \r', '\r', ' \r', '4% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'* ] 84% \r', ' \r', '\r', ' \r', '\r', ' \r', '5% \r', ' \r', '\r', ' \r', '\r', ' \r', '6% \r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '* ] 86% \r', ' \r', '\r', ' \r', '\r', ' \r', '7% \r', ' \r', '\r', ' \r', '\r', ' \r', '8% \r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'* ] 88% \r', ' \r', '\r', ' \r', '\r', ' \r', '9% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '90% \r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'* ] 90% \r', ' \r', '\r', ' \r', '\r', ' \r', '1% \r', ' \r', '\r', ' \r', '\r', ' \r', '2% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '* ] 92% \r', ' \r', '\r', ' \r', '\r',
' \r', '3% \r', ' \r', '\r', ' \r', '\r', ' \r', '4% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '* ] 94% \r', ' \r', '\r', ' \r', '\r', ' \r', '5% \r', ' \r', '\r', ' \r', '\r', ' \r',
'6% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '* ] 96% \r',
' \r', '\r', ' \r', '\r', ' \r', '7% \r', ' \r', '\r', ' \r', '\r', ' \r', '8% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '* ] 98% \r', ' \r', '\r', ' \r', '\r', ' \r', '9% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '*] 100% \r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'done.\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', 'Closing... (this might take a while) [********************************* ] 99% \r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r',
' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
'\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r', '\r', ' \r',
' done.\r', ' \r', 'Burn completed.\n', '\n']
print(actual)
self.assertEqual(actual, expected)
@staticmethod
def __test_resource(name: str) -> str:
script_path = Path(os.path.realpath(__file__)).parent.parent
return f'{script_path}/test-resources/{name}'
if __name__ == '__main__':
unittest.main()
| 131.256604
| 160
| 0.151741
| 4,470
| 34,783
| 1.168456
| 0.038031
| 1.529389
| 2.205629
| 2.823665
| 0.818495
| 0.818495
| 0.818495
| 0.818495
| 0.8139
| 0.8139
| 0
| 0.010558
| 0.357358
| 34,783
| 264
| 161
| 131.753788
| 0.223057
| 0.01104
| 0
| 0.510549
| 0
| 0.004219
| 0.370696
| 0.002937
| 0
| 0
| 0
| 0
| 0.004219
| 1
| 0.008439
| false
| 0
| 0.016878
| 0
| 0.033755
| 0.004219
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
14914fdc07cff57112397b693e96322c31fd45c6
| 117
|
py
|
Python
|
api/routes/providers/mpesa/portalsdk/__init__.py
|
ProsperChihimba/payment-project
|
c58d446077ddd168940f67c4ef1e2b5c2407a2cc
|
[
"MIT"
] | 6
|
2021-12-04T06:01:36.000Z
|
2022-02-28T22:46:38.000Z
|
api/routes/providers/mpesa/portalsdk/__init__.py
|
ProsperChihimba/payment-project
|
c58d446077ddd168940f67c4ef1e2b5c2407a2cc
|
[
"MIT"
] | null | null | null |
api/routes/providers/mpesa/portalsdk/__init__.py
|
ProsperChihimba/payment-project
|
c58d446077ddd168940f67c4ef1e2b5c2407a2cc
|
[
"MIT"
] | null | null | null |
from .api import APIContext
from .api import APIMethodType
from .api import APIRequest
from .api import APIResponse
| 19.5
| 30
| 0.820513
| 16
| 117
| 6
| 0.4375
| 0.291667
| 0.541667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145299
| 117
| 5
| 31
| 23.4
| 0.96
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
1497d91432ea77fa71d064dcf2820e154ed95b2c
| 39,950
|
py
|
Python
|
dev/StarterGame/AWS/project-code/test/test_AccessResourceHandler.py
|
jeikabu/lumberyard
|
07228c605ce16cbf5aaa209a94a3cb9d6c1a4115
|
[
"AML"
] | 8
|
2019-10-07T16:33:47.000Z
|
2020-12-07T03:59:58.000Z
|
dev/StarterGame/AWS/project-code/test/test_AccessResourceHandler.py
|
jeikabu/lumberyard
|
07228c605ce16cbf5aaa209a94a3cb9d6c1a4115
|
[
"AML"
] | null | null | null |
dev/StarterGame/AWS/project-code/test/test_AccessResourceHandler.py
|
jeikabu/lumberyard
|
07228c605ce16cbf5aaa209a94a3cb9d6c1a4115
|
[
"AML"
] | 5
|
2020-08-27T20:44:18.000Z
|
2021-08-21T22:54:11.000Z
|
#
# All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
# its licensors.
#
# For complete copyright and license terms please see the LICENSE at the root of this
# distribution (the "License"). All use of this software is governed by the License,
# or, if provided, by the license below or the license accompanying this file. Do not
# remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# $Revision: #1 $
import unittest
import mock
import json
import boto3
from botocore.exceptions import ClientError
import discovery_utils
import errors
import custom_resource_response
import AccessResourceHandler
class TestAccess(unittest.TestCase):
event = {}
context = {}
def setUp(self):
reload(AccessResourceHandler)
self.event = {
'ResourceProperties': {
'ConfigurationBucket': 'TestBucket',
'ConfigurationKey': 'TestKey',
'RoleLogicalId': 'AccessRole',
'MetadataKey' : 'ResourceAccess',
'UsePropagationDelay' : 'false',
'RequireRoleExists' : 'false',
'PhysicalResourceId' : 'CloudCanvas:TestAccess:TestStackName'
},
'StackId': 'TestStackId',
'LogicalResourceId': 'TestLogicalResourceId'
}
self.properties = self.event['ResourceProperties']
def test_handler_with_no_resource_group_or_deployment_stack(self):
print errors.ValidationError
with self.assertRaises(errors.ValidationError):
AccessResourceHandler.handler(self.event, self.context)
def test_handler_with_both_resource_group_and_deployment_stack(self):
self.properties['ResourceGroupStack'] = 'TestResourceGroupStack'
self.properties['DeploymentStack'] = 'TestDeploymentStack'
with self.assertRaises(errors.ValidationError):
AccessResourceHandler.handler(self.event, self.context)
def test_handler_with_resource_group_stack_create_without_role(self):
self._do_handler_with_resource_group_stack_test('Create', with_access_stack=False)
def test_handler_with_resource_group_stack_create_with_role(self):
self._do_handler_with_resource_group_stack_test('Create')
def test_handler_with_resource_group_stack_update(self):
self._do_handler_with_resource_group_stack_test('Update')
def _do_handler_with_resource_group_stack_test(self, request_type, with_access_stack = True):
self.properties['ResourceGroupStack'] = 'TestResourceGroupStackId'
self.event['RequestType'] = request_type
mock_aim_client = mock.MagicMock()
mock_put_role_policy = mock.MagicMock(return_value={})
mock_aim_client.put_role_policy = mock_put_role_policy
def mock_describe_stack_resources_side_effect(PhysicalResourceId=None, StackName=None):
if PhysicalResourceId is None and StackName == 'TestProjectStackId':
return {
'StackResources': [
{
'PhysicalResourceId': 'TestDeploymentStackId',
'LogicalResourceId': 'TestDeploymentStackName',
'ResourceType': 'AWS::CloudFormation::Stack',
'StackId': 'TestProjectStackId'
}
]
}
if PhysicalResourceId == 'TestResourceGroupStackId' or StackName == 'TestDeploymentStackId':
return {
'StackResources': [
{
'PhysicalResourceId': 'TestResourceGroupStackId',
'LogicalResourceId': 'TestResourceGroupStackName',
'ResourceType': 'AWS::CloudFormation::Stack',
'StackId': 'TestDeploymentStackId'
}
]
}
if with_access_stack and StackName == 'TestDeploymentStackId-Access':
return {
'StackResources': [
{
'PhysicalResourceId': 'AccessRoleResourceId',
'LogicalResourceId': 'AccessRole',
'ResourceType': 'AWS::IAM::Role',
'StackId': 'TestDeploymentStackId-Access'
}
]
}
if StackName == 'TestResourceGroupStackId':
return {
'StackResources': [
{
'PhysicalResourceId': 'AccessibleResourceId1',
'LogicalResourceId': 'AccessibleResourceName1',
'ResourceType': 'TestResourceType1',
'StackId': 'TestResourceGroupStackId'
},
{
'PhysicalResourceId': 'AccessibleResourceId2',
'LogicalResourceId': 'AccessibleResourceName2',
'ResourceType': 'TestResourceType2',
'StackId': 'TestResourceGroupStackId'
},
{
'PhysicalResourceId': 'NonAccessibleResourceId',
'LogicalResourceId': 'NonAccessibleResourceName',
'ResourceType': 'TestResourceType',
'StackId': 'TestResourceGroupStackId'
}
]
}
print '**** mock_describe_stack_resources_side_effect ***', PhysicalResourceId, StackName
return None
def mock_describe_stack_resource_side_effect(StackName=None, LogicalResourceId=None):
if StackName != 'TestResourceGroupStackId':
return None
if LogicalResourceId == 'AccessibleResourceName1':
return {
'StackResourceDetail': {
'PhysicalResourceId': 'AccessibleResourceId1',
'LogicalResourceId': 'AccessibleResourceName1',
'ResourceType': 'TestResourceType1',
'StackId': 'TestResourceGroupStackId',
'Metadata': '''{
"CloudCanvas": {
"ResourceAccess": {
"Action": "TestAction1"
}
}
}'''
}
}
if LogicalResourceId == 'AccessibleResourceName2':
return {
'StackResourceDetail': {
'PhysicalResourceId': 'AccessibleResourceId2',
'LogicalResourceId': 'AccessibleResourceName2',
'ResourceType': 'TestResourceType2',
'StackId': 'TestResourceGroupStackId',
'Metadata': '''{
"CloudCanvas": {
"ResourceAccess": {
"Action": [ "TestAction2A", "TestAction2B" ],
"ResourceSuffix": "TestSuffix"
}
}
}'''
}
}
if LogicalResourceId == 'NonAccessibleResourceName':
return {
'StackResourceDetail': {
'PhysicalResourceId': 'NonAccessibleResourceId',
'LogicalResourceId': 'NonAccessibleResourceName',
'ResourceType': 'TestResourceType',
'StackId': 'TestResourceGroupStackId'
}
}
return None
def mock_describe_stacks_side_effect(StackName=None):
if StackName == 'TestDeploymentStackName-Access':
if with_access_stack:
return {
'Stacks': [
{
'StackId': 'TestDeploymentStackId-Access',
'StackStatus': 'CREATE_COMPLETE'
}
]
}
else:
return {
'Stacks': [
]
}
if StackName == 'TestDeploymentStackId':
return {
'Stacks': [
{
'Parameters': [
{
'ParameterKey': 'DeploymentName',
'ParameterValue': 'TestDeployment'
},
{
'ParameterKey': 'ProjectStackId',
'ParameterValue': 'TestProjectStackId'
}
]
}
]
}
return None
def mock_get_stack_name_from_stack_arn_side_effect(stack_arn):
return stack_arn.replace('Id', 'Name')
def mock_get_resource_arn_side_effect(stack_arn, resource_type, resource_name):
return resource_name + 'Arn'
mock_cf_client = mock.MagicMock()
mock_cf_client.describe_stack_resources = mock.MagicMock(side_effect=mock_describe_stack_resources_side_effect)
mock_cf_client.describe_stack_resource = mock.MagicMock(side_effect=mock_describe_stack_resource_side_effect)
mock_cf_client.describe_stacks = mock.MagicMock(side_effect=mock_describe_stacks_side_effect)
def boto3_client_side_effect(client_type, region_name=None):
if client_type == 'iam':
return mock_aim_client
elif client_type == 'cloudformation':
return mock_cf_client
else:
return None
expected_data = {}
expected_physical_id = 'CloudCanvas:TestAccess:TestStackName'
with mock.patch.object(custom_resource_response, 'succeed') as mock_custom_resource_response_succeed:
with mock.patch.object(boto3, 'client') as mock_boto3_client:
with mock.patch.object(discovery_utils, 'get_stack_name_from_stack_arn') as mock_get_stack_name_from_stack_arn:
with mock.patch.object(discovery_utils, 'get_region_from_stack_arn') as mock_get_region_from_stack_arn:
with mock.patch.object(discovery_utils, 'get_account_id_from_stack_arn') as mock_get_account_id_from_stack_arn:
with mock.patch.object(discovery_utils, 'get_resource_arn') as mock_get_resource_arn:
mock_get_stack_name_from_stack_arn.side_effect = mock_get_stack_name_from_stack_arn_side_effect
mock_get_region_from_stack_arn.return_value = 'TestRegion'
mock_get_account_id_from_stack_arn.return_value = 'TestAccount'
mock_get_resource_arn.side_effect = mock_get_resource_arn_side_effect
mock_boto3_client.side_effect = boto3_client_side_effect
reload(AccessResourceHandler)
AccessResourceHandler.handler(self.event, self.context)
if not with_access_stack:
self.assertEquals(mock_put_role_policy.call_count, 0)
else:
self.assertEquals(mock_put_role_policy.call_count, 1)
mock_put_role_policy_kwargs = mock_put_role_policy.call_args[1]
self.assertEquals(mock_put_role_policy_kwargs['RoleName'], 'AccessRoleResourceId')
self.assertEquals(mock_put_role_policy_kwargs['PolicyName'], 'TestResourceGroupStackName')
policy_document_string = mock_put_role_policy_kwargs['PolicyDocument']
self.assertTrue(isinstance(policy_document_string, basestring))
policy_document = json.loads(policy_document_string)
expected_policy_document = {
u'Version': u'2012-10-17',
u'Statement': [
{
u'Sid': u'AccessibleResourceName1Access',
u'Effect': u'Allow',
u'Action': [ u'TestAction1' ],
u'Resource': u'AccessibleResourceId1Arn'
},
{
u'Sid': u'AccessibleResourceName2Access',
u'Effect': u'Allow',
u'Action': [ u'TestAction2A', u'TestAction2B' ],
u'Resource': u'AccessibleResourceId2ArnTestSuffix'
}
]
}
self.maxDiff = None
self.assertEquals(expected_policy_document, policy_document)
mock_custom_resource_response_succeed.assert_called_once_with(
self.event,
self.context,
expected_data,
expected_physical_id)
def test_handler_with_resource_group_stack_delete(self):
self.properties['ResourceGroupStack'] = 'TestResourceGroupStackId'
self.event['RequestType'] = 'Delete'
mock_aim_client = mock.MagicMock()
mock_delete_role_policy = mock.MagicMock(return_value={})
mock_aim_client.delete_role_policy = mock_delete_role_policy
def mock_describe_stack_resources_side_effect(PhysicalResourceId=None, StackName=None):
if PhysicalResourceId == 'TestResourceGroupStackId' or StackName == 'TestDeploymentStackId':
return {
'StackResources': [
{
'PhysicalResourceId': 'TestResourceGroupStackId',
'LogicalResourceId': 'TestResourceGroupStackName',
'ResourceType': 'AWS::CloudFormation::Stack',
'StackId': 'TestDeploymentStackId'
}
]
}
if StackName == 'TestDeploymentStackId-Access':
return {
'StackResources': [
{
'PhysicalResourceId': 'AccessRoleResourceId',
'LogicalResourceId': 'AccessRole',
'ResourceType': 'AWS::IAM::Role',
'StackId': 'TestDeploymentStackId-Access'
}
]
}
if StackName == 'TestResourceGroupStackId':
return {
'StackResources': [
{
'PhysicalResourceId': 'AccessibleResourceId1',
'LogicalResourceId': 'AccessibleResourceName1',
'ResourceType': 'TestResourceType1',
'StackId': 'TestResourceGroupStackId'
},
{
'PhysicalResourceId': 'AccessibleResourceId2',
'LogicalResourceId': 'AccessibleResourceName2',
'ResourceType': 'TestResourceType2',
'StackId': 'TestResourceGroupStackId'
},
{
'PhysicalResourceId': 'NonAccessibleResourceId',
'LogicalResourceId': 'NonAccessibleResourceName',
'ResourceType': 'TestResourceType',
'StackId': 'TestResourceGroupStackId'
}
]
}
print '**** mock_describe_stack_resources_side_effect ***', PhysicalResourceId, StackName
return None
def mock_describe_stacks_side_effect(StackName=None):
if StackName == 'TestDeploymentStackName-Access':
return {
'Stacks': [
{
'StackId': 'TestDeploymentStackId-Access',
'StackStatus': 'CREATE_COMPLETE'
}
]
}
if StackName == 'TestDeploymentStackId':
return {
'Stacks': [
{
'Parameters': [
{
'ParameterKey': 'DeploymentName',
'ParameterValue': 'TestDeployment'
},
{
'ParameterKey': 'ProjectStackId',
'ParameterValue': 'TestProjectStackId'
}
]
}
]
}
return None
def mock_get_stack_name_from_stack_arn_side_effect(stack_arn):
return stack_arn.replace('Id', 'Name')
mock_cf_client = mock.MagicMock()
mock_cf_client.describe_stack_resources = mock.MagicMock(side_effect=mock_describe_stack_resources_side_effect)
mock_cf_client.describe_stacks = mock.MagicMock(side_effect=mock_describe_stacks_side_effect)
def boto3_client_side_effect(client_type, region_name=None):
if client_type == 'iam':
return mock_aim_client
elif client_type == 'cloudformation':
return mock_cf_client
else:
return None
expected_data = {}
expected_physical_id = 'CloudCanvas:TestAccess:TestStackName'
with mock.patch.object(custom_resource_response, 'succeed') as mock_custom_resource_response_succeed:
with mock.patch.object(boto3, 'client') as mock_boto3_client:
with mock.patch.object(discovery_utils, 'get_stack_name_from_stack_arn') as mock_get_stack_name_from_stack_arn:
with mock.patch.object(discovery_utils, 'get_region_from_stack_arn') as mock_get_region_from_stack_arn:
with mock.patch.object(discovery_utils, 'get_account_id_from_stack_arn') as mock_get_account_id_from_stack_arn:
mock_get_stack_name_from_stack_arn.side_effect = mock_get_stack_name_from_stack_arn_side_effect
mock_get_region_from_stack_arn.return_value = 'TestRegion'
mock_get_account_id_from_stack_arn.return_value = 'TestAccount'
mock_boto3_client.side_effect = boto3_client_side_effect
reload(AccessResourceHandler)
AccessResourceHandler.handler(self.event, self.context)
mock_delete_role_policy.assert_called_once_with(RoleName='AccessRoleResourceId', PolicyName='TestResourceGroupStackName')
mock_custom_resource_response_succeed.assert_called_once_with(
self.event,
self.context,
expected_data,
expected_physical_id)
def test_handler_with_resource_group_stack_delete_without_role(self):
self.properties['ResourceGroupStack'] = 'TestResourceGroupStackId'
self.event['RequestType'] = 'Delete'
mock_aim_client = mock.MagicMock()
mock_delete_role_policy = mock.MagicMock(return_value={})
mock_aim_client.delete_role_policy = mock_delete_role_policy
def mock_describe_stack_resources_side_effect(PhysicalResourceId=None, StackName=None):
if PhysicalResourceId is None and StackName == 'TestProjectStackId':
return {
'StackResources': [
{
'PhysicalResourceId': 'TestDeploymentStackId',
'LogicalResourceId': 'TestDeploymentStackName',
'ResourceType': 'AWS::CloudFormation::Stack',
'StackId': 'TestProjectStackId'
}
]
}
if PhysicalResourceId == 'TestResourceGroupStackId' or StackName == 'TestDeploymentStackId':
return {
'StackResources': [
{
'PhysicalResourceId': 'TestResourceGroupStackId',
'LogicalResourceId': 'TestResourceGroupStackName',
'ResourceType': 'AWS::CloudFormation::Stack',
'StackId': 'TestDeploymentStackId'
}
]
}
if StackName == 'TestResourceGroupStackId':
return {
'StackResources': [
{
'PhysicalResourceId': 'AccessibleResourceId1',
'LogicalResourceId': 'AccessibleResourceName1',
'ResourceType': 'TestResourceType1',
'StackId': 'TestResourceGroupStackId'
},
{
'PhysicalResourceId': 'AccessibleResourceId2',
'LogicalResourceId': 'AccessibleResourceName2',
'ResourceType': 'TestResourceType2',
'StackId': 'TestResourceGroupStackId'
},
{
'PhysicalResourceId': 'NonAccessibleResourceId',
'LogicalResourceId': 'NonAccessibleResourceName',
'ResourceType': 'TestResourceType',
'StackId': 'TestResourceGroupStackId'
}
]
}
print '**** mock_describe_stack_resources_side_effect ***', PhysicalResourceId, StackName
return None
# CloudFormation describe_stacks mock: no existing access stack for the
# deployment, plus the deployment stack's parameters (deployment name and
# owning project stack id).
def mock_describe_stacks_side_effect(StackName=None):
if StackName == 'TestDeploymentStackName-Access':
# Empty list: the access stack does not exist yet in this scenario.
return {
'Stacks': [
]
}
if StackName == 'TestDeploymentStackId':
return {
'Stacks': [
{
'Parameters': [
{
'ParameterKey': 'DeploymentName',
'ParameterValue': 'TestDeployment'
},
{
'ParameterKey': 'ProjectStackId',
'ParameterValue': 'TestProjectStackId'
}
]
}
]
}
return None
# The handler derives stack names from ARNs; in these fixtures the name is
# simply the id with 'Id' replaced by 'Name'.
def mock_get_stack_name_from_stack_arn_side_effect(stack_arn):
return stack_arn.replace('Id', 'Name')
mock_cf_client = mock.MagicMock()
mock_cf_client.describe_stack_resources = mock.MagicMock(side_effect=mock_describe_stack_resources_side_effect)
mock_cf_client.describe_stacks = mock.MagicMock(side_effect=mock_describe_stacks_side_effect)
# boto3.client replacement: hand back the IAM or CloudFormation mock.
def boto3_client_side_effect(client_type, region_name=None):
if client_type == 'iam':
return mock_aim_client
elif client_type == 'cloudformation':
return mock_cf_client
else:
return None
expected_data = {}
expected_physical_id = 'CloudCanvas:TestAccess:TestStackName'
# Patch the handler's collaborators, then run it and verify the outcome.
with mock.patch.object(custom_resource_response, 'succeed') as mock_custom_resource_response_succeed:
with mock.patch.object(boto3, 'client') as mock_boto3_client:
with mock.patch.object(discovery_utils, 'get_stack_name_from_stack_arn') as mock_get_stack_name_from_stack_arn:
with mock.patch.object(discovery_utils, 'get_region_from_stack_arn') as mock_get_region_from_stack_arn:
with mock.patch.object(discovery_utils, 'get_account_id_from_stack_arn') as mock_get_account_id_from_stack_arn:
mock_get_stack_name_from_stack_arn.side_effect = mock_get_stack_name_from_stack_arn_side_effect
mock_get_region_from_stack_arn.return_value = 'TestRegion'
mock_get_account_id_from_stack_arn.return_value = 'TestAccount'
mock_boto3_client.side_effect = boto3_client_side_effect
# reload so AccessResourceHandler binds the patched modules.
reload(AccessResourceHandler)
AccessResourceHandler.handler(self.event, self.context)
# No access stack exists, so no role policy may be deleted.
self.assertEquals(0, mock_delete_role_policy.called)
mock_custom_resource_response_succeed.assert_called_once_with(
self.event,
self.context,
expected_data,
expected_physical_id)
def test_handler_with_resource_group_stack_delete_with_role_deleted(self):
"""Delete of a resource group stack when the role policy is already gone.

delete_role_policy raises AccessDenied (as if the role was deleted out from
under us); the handler must tolerate that and still report success.
"""
# Target a resource group stack and simulate a CloudFormation Delete event.
self.properties['ResourceGroupStack'] = 'TestResourceGroupStackId'
self.event['RequestType'] = 'Delete'
# IAM mock: deleting the role policy fails with AccessDenied.
def mock_delete_role_policy_side_effect(RoleName=None, PolicyName=None):
raise ClientError({ "Error": { "Code": "AccessDenied" } }, 'DeleteRolePolicy')
mock_aim_client = mock.MagicMock()
mock_delete_role_policy = mock.MagicMock(return_value={})
mock_delete_role_policy.side_effect = mock_delete_role_policy_side_effect
mock_aim_client.delete_role_policy = mock_delete_role_policy
# CloudFormation resource listings: resource group -> deployment linkage,
# the access stack's role, and the resource group's own resources.
def mock_describe_stack_resources_side_effect(PhysicalResourceId=None, StackName=None):
if PhysicalResourceId == 'TestResourceGroupStackId' or StackName == 'TestDeploymentStackId':
return {
'StackResources': [
{
'PhysicalResourceId': 'TestResourceGroupStackId',
'LogicalResourceId': 'TestResourceGroupStackName',
'ResourceType': 'AWS::CloudFormation::Stack',
'StackId': 'TestDeploymentStackId'
}
]
}
if StackName == 'TestDeploymentStackId-Access':
# The access stack exists here and exposes the IAM role whose inline
# policy the handler will try to delete.
return {
'StackResources': [
{
'PhysicalResourceId': 'AccessRoleResourceId',
'LogicalResourceId': 'AccessRole',
'ResourceType': 'AWS::IAM::Role',
'StackId': 'TestDeploymentStackId-Access'
}
]
}
if StackName == 'TestResourceGroupStackId':
return {
'StackResources': [
{
'PhysicalResourceId': 'AccessibleResourceId1',
'LogicalResourceId': 'AccessibleResourceName1',
'ResourceType': 'TestResourceType1',
'StackId': 'TestResourceGroupStackId'
},
{
'PhysicalResourceId': 'AccessibleResourceId2',
'LogicalResourceId': 'AccessibleResourceName2',
'ResourceType': 'TestResourceType2',
'StackId': 'TestResourceGroupStackId'
},
{
'PhysicalResourceId': 'NonAccessibleResourceId',
'LogicalResourceId': 'NonAccessibleResourceName',
'ResourceType': 'TestResourceType',
'StackId': 'TestResourceGroupStackId'
}
]
}
# Python 2 print statement; flags an unexpected lookup during the test.
print '**** mock_describe_stack_resources_side_effect ***', PhysicalResourceId, StackName
return None
# describe_stacks mock: the access stack exists (CREATE_COMPLETE) and the
# deployment stack carries its name/project parameters.
def mock_describe_stacks_side_effect(StackName=None):
if StackName == 'TestDeploymentStackName-Access':
return {
'Stacks': [
{
'StackId': 'TestDeploymentStackId-Access',
'StackStatus': 'CREATE_COMPLETE'
}
]
}
if StackName == 'TestDeploymentStackId':
return {
'Stacks': [
{
'Parameters': [
{
'ParameterKey': 'DeploymentName',
'ParameterValue': 'TestDeployment'
},
{
'ParameterKey': 'ProjectStackId',
'ParameterValue': 'TestProjectStackId'
}
]
}
]
}
return None
# Fixture convention: stack name is the id with 'Id' replaced by 'Name'.
def mock_get_stack_name_from_stack_arn_side_effect(stack_arn):
return stack_arn.replace('Id', 'Name')
mock_cf_client = mock.MagicMock()
mock_cf_client.describe_stack_resources = mock.MagicMock(side_effect=mock_describe_stack_resources_side_effect)
mock_cf_client.describe_stacks = mock.MagicMock(side_effect=mock_describe_stacks_side_effect)
# boto3.client replacement: route to the IAM or CloudFormation mock.
def boto3_client_side_effect(client_type, region_name=None):
if client_type == 'iam':
return mock_aim_client
elif client_type == 'cloudformation':
return mock_cf_client
else:
return None
expected_data = {}
expected_physical_id = 'CloudCanvas:TestAccess:TestStackName'
# Patch the handler's collaborators, run it, then verify outcomes.
with mock.patch.object(custom_resource_response, 'succeed') as mock_custom_resource_response_succeed:
with mock.patch.object(boto3, 'client') as mock_boto3_client:
with mock.patch.object(discovery_utils, 'get_stack_name_from_stack_arn') as mock_get_stack_name_from_stack_arn:
with mock.patch.object(discovery_utils, 'get_region_from_stack_arn') as mock_get_region_from_stack_arn:
with mock.patch.object(discovery_utils, 'get_account_id_from_stack_arn') as mock_get_account_id_from_stack_arn:
mock_get_stack_name_from_stack_arn.side_effect = mock_get_stack_name_from_stack_arn_side_effect
mock_get_region_from_stack_arn.return_value = 'TestRegion'
mock_get_account_id_from_stack_arn.return_value = 'TestAccount'
mock_boto3_client.side_effect = boto3_client_side_effect
# reload so AccessResourceHandler binds the patched modules.
reload(AccessResourceHandler)
AccessResourceHandler.handler(self.event, self.context)
# The deletion attempt was made once, and the AccessDenied from IAM
# did not prevent the handler from succeeding.
mock_delete_role_policy.assert_called_once_with(RoleName='AccessRoleResourceId', PolicyName='TestResourceGroupStackName')
mock_custom_resource_response_succeed.assert_called_once_with(
self.event,
self.context,
expected_data,
expected_physical_id)
def test_handler_with_deployment_stack(self):
"""When pointed at a deployment stack, the handler must process each nested
resource group stack (and only those) via _process_resource_group_stack."""
# Target the deployment stack with an arbitrary request type; the request
# type should be forwarded verbatim to _process_resource_group_stack.
self.properties['DeploymentStack'] = 'TestDeploymentStackId'
self.event['RequestType'] = 'TestRequestType'
# CloudFormation resource listings: two nested resource group stacks plus
# one non-stack resource (which must be skipped), and the access role.
def mock_describe_stack_resources_side_effect(PhysicalResourceId=None, StackName=None):
if StackName == 'TestDeploymentStackId':
return {
'StackResources': [
{
'PhysicalResourceId': 'TestResourceGroupStackId1',
'LogicalResourceId': 'TestResourceGroupStackName1',
'ResourceType': 'AWS::CloudFormation::Stack',
'StackId': 'TestDeploymentStackId'
},
{
'PhysicalResourceId': 'TestResourceGroupStackId2',
'LogicalResourceId': 'TestResourceGroupStackName2',
'ResourceType': 'AWS::CloudFormation::Stack',
'StackId': 'TestDeploymentStackId'
},
{
'PhysicalResourceId': 'OtherId',
'LogicalResourceId': 'OtherName',
'ResourceType': 'OtherType',
'StackId': 'TestDeploymentStackId'
}
]
}
if StackName == 'TestDeploymentStackId-Access':
return {
'StackResources': [
{
'PhysicalResourceId': 'AccessRoleResourceId',
'LogicalResourceId': 'AccessRole',
'ResourceType': 'AWS::IAM::Role',
'StackId': 'TestDeploymentStackId-Access'
}
]
}
# Python 2 print statement; flags an unexpected lookup during the test.
print '**** mock_describe_stack_resources_side_effect ***', PhysicalResourceId, StackName
return None
# Fixture convention: stack name is the id with 'Id' replaced by 'Name'.
def mock_get_stack_name_from_stack_arn_side_effect(stack_arn):
return stack_arn.replace('Id', 'Name')
# describe_stacks mock: access stack exists, deployment stack parameters.
def mock_describe_stacks_side_effect(StackName = None):
if StackName == 'TestDeploymentStackName-Access':
return {
'Stacks': [
{
'StackId': 'TestDeploymentStackId-Access',
'StackStatus': 'CREATE_COMPLETE'
}
]
}
if StackName == 'TestDeploymentStackId':
return {
'Stacks': [
{
'Parameters': [
{
'ParameterKey': 'DeploymentName',
'ParameterValue': 'TestDeployment'
},
{
'ParameterKey': 'ProjectStackId',
'ParameterValue': 'TestProjectStackId'
}
]
}
]
}
return None
mock_cf_client = mock.MagicMock()
mock_cf_client.describe_stack_resources = mock.MagicMock(side_effect=mock_describe_stack_resources_side_effect)
mock_cf_client.describe_stacks = mock.MagicMock(side_effect=mock_describe_stacks_side_effect)
# boto3.client replacement; only CloudFormation is needed in this test.
def boto3_client_side_effect(client_type, region_name=None):
if client_type == 'cloudformation':
return mock_cf_client
else:
return None
expected_data = {}
expected_physical_id = 'CloudCanvas:TestAccess:TestStackName'
# Patch the handler's collaborators, run it, then verify the fan-out.
with mock.patch.object(custom_resource_response, 'succeed') as mock_custom_resource_response_succeed:
with mock.patch.object(boto3, 'client') as mock_boto3_client:
with mock.patch.object(discovery_utils, 'get_stack_name_from_stack_arn') as mock_get_stack_name_from_stack_arn:
with mock.patch.object(discovery_utils, 'get_region_from_stack_arn') as mock_get_region_from_stack_arn:
with mock.patch.object(discovery_utils, 'get_account_id_from_stack_arn') as mock_get_account_id_from_stack_arn:
mock_get_stack_name_from_stack_arn.side_effect = mock_get_stack_name_from_stack_arn_side_effect
mock_get_region_from_stack_arn.return_value = 'TestRegion'
mock_get_account_id_from_stack_arn.return_value = 'TestAccount'
mock_boto3_client.side_effect = boto3_client_side_effect
# reload so AccessResourceHandler binds the patched modules.
reload(AccessResourceHandler)
with mock.patch.object(AccessResourceHandler, '_process_resource_group_stack') as mock_process_resource_group_stack:
AccessResourceHandler.handler(self.event, self.context)
# Exactly the two nested stacks are processed; 'OtherType' is not.
self.assertEquals(mock_process_resource_group_stack.call_count, 2)
mock_process_resource_group_stack.assert_any_call('TestRequestType', ResourceGroupInfoForResourceGroup('TestResourceGroupStackName1'), 'AccessRoleResourceId', 'ResourceAccess', False)
mock_process_resource_group_stack.assert_any_call('TestRequestType', ResourceGroupInfoForResourceGroup('TestResourceGroupStackName2'), 'AccessRoleResourceId', 'ResourceAccess', False)
mock_custom_resource_response_succeed.assert_called_once_with(
self.event,
self.context,
expected_data,
expected_physical_id)
class ResourceGroupInfoForResourceGroup(object):
    """Lightweight stand-in for the resource-group info objects that
    AccessResourceHandler passes to _process_resource_group_stack.

    Equality is deliberately duck-typed on resource_group_name only, so an
    instance compares equal to the handler's real info object with the same
    name inside mock assert_any_call checks.
    """

    # Sentinel distinct from every legal attribute value, so comparing with an
    # object lacking the attribute yields False instead of AttributeError.
    _MISSING = object()

    def __init__(self, resource_group_name):
        # Name of the resource group stack this object represents.
        self.resource_group_name = resource_group_name

    def __repr__(self):
        return 'ResourceGroupInfoForResourceGroup(resource_group_name="{}")'.format(self.resource_group_name)

    def __eq__(self, other):
        # Duck-typed comparison; never raises on unrelated objects.
        return getattr(other, 'resource_group_name', self._MISSING) == self.resource_group_name

    def __ne__(self, other):
        # Python 2 does not derive __ne__ from __eq__; define it explicitly so
        # != is always consistent with ==.
        return not self.__eq__(other)
| 47.61621
| 215
| 0.504856
| 2,706
| 39,950
| 7.069106
| 0.0898
| 0.038685
| 0.034503
| 0.026818
| 0.839668
| 0.825448
| 0.815359
| 0.781327
| 0.743635
| 0.734435
| 0
| 0.00384
| 0.426308
| 39,950
| 838
| 216
| 47.673031
| 0.830795
| 0.012966
| 0
| 0.61693
| 0
| 0
| 0.227885
| 0.104061
| 0
| 0
| 0
| 0
| 0.02726
| 0
| null | null | 0
| 0.012912
| null | null | 0.008608
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
14b9dd4b4f409944653b3f2c340a45b48623239f
| 9,698
|
py
|
Python
|
meshtastic/tests/test_tunnel.py
|
prampec/Meshtastic-python
|
33ff4e36de578b12a889f9fcc42a1b1017262323
|
[
"Apache-2.0"
] | 1
|
2022-02-20T09:16:25.000Z
|
2022-02-20T09:16:25.000Z
|
meshtastic/tests/test_tunnel.py
|
willy5123456/Meshtastic-python
|
bf879934e61b2d73e052ef34edba18933db62469
|
[
"Apache-2.0"
] | null | null | null |
meshtastic/tests/test_tunnel.py
|
willy5123456/Meshtastic-python
|
bf879934e61b2d73e052ef34edba18933db62469
|
[
"Apache-2.0"
] | 1
|
2022-01-24T17:07:30.000Z
|
2022-01-24T17:07:30.000Z
|
"""Meshtastic unit tests for tunnel.py"""
import re
import sys
import logging
from unittest.mock import patch, MagicMock
import pytest
from ..tcp_interface import TCPInterface
from ..tunnel import Tunnel, onTunnelReceive
from ..globals import Globals
@pytest.mark.unit
@patch('platform.system')
def test_Tunnel_on_non_linux_system(mock_platform_system):
    """Test that we cannot instantiate a Tunnel on a non Linux system"""
# Pretend platform.system() reports something other than 'Linux'.
a_mock = MagicMock()
a_mock.return_value = 'notLinux'
mock_platform_system.side_effect = a_mock
with patch('socket.socket') as mock_socket:
with pytest.raises(Exception) as pytest_wrapped_e:
iface = TCPInterface(hostname='localhost', noProto=True)
Tunnel(iface)
# Tunnel construction must fail on a non-Linux platform.
assert pytest_wrapped_e.type == Exception
assert mock_socket.called
@pytest.mark.unit
@patch('platform.system')
def test_Tunnel_without_interface(mock_platform_system):
    """Test that we can not instantiate a Tunnel without a valid interface"""
# Platform is mocked as Linux so the failure is specifically the
# missing interface, not the OS check.
a_mock = MagicMock()
a_mock.return_value = 'Linux'
mock_platform_system.side_effect = a_mock
with pytest.raises(Exception) as pytest_wrapped_e:
Tunnel(None)
assert pytest_wrapped_e.type == Exception
@pytest.mark.unitslow
@patch('platform.system')
def test_Tunnel_with_interface(mock_platform_system, caplog, iface_with_nodes):
    """Test that we can instantiate a Tunnel with a valid interface"""
iface = iface_with_nodes
iface.myInfo.my_node_num = 2475227164
a_mock = MagicMock()
a_mock.return_value = 'Linux'
mock_platform_system.side_effect = a_mock
with caplog.at_level(logging.WARNING):
with patch('socket.socket'):
tun = Tunnel(iface)
# The Tunnel registers itself as the global tunnel instance.
assert tun == Globals.getInstance().get_tunnelInstance()
iface.close()
# NOTE(review): the unescaped '()' in the first pattern is an empty regex
# group, so it effectively matches the literal 'Not creating a TapDevice'.
assert re.search(r'Not creating a TapDevice()', caplog.text, re.MULTILINE)
assert re.search(r'Not starting TUN reader', caplog.text, re.MULTILINE)
assert re.search(r'Not sending packet', caplog.text, re.MULTILINE)
@pytest.mark.unitslow
@patch('platform.system')
def test_onTunnelReceive_from_ourselves(mock_platform_system, caplog, iface_with_nodes):
    """onTunnelReceive must ignore a packet whose 'from' is our own node."""
iface = iface_with_nodes
iface.myInfo.my_node_num = 2475227164
sys.argv = ['']
Globals.getInstance().set_args(sys.argv)
# 'from' equals our own node number, so the packet must be dropped.
packet = {'decoded': { 'payload': 'foo'}, 'from': 2475227164}
a_mock = MagicMock()
a_mock.return_value = 'Linux'
mock_platform_system.side_effect = a_mock
with caplog.at_level(logging.DEBUG):
with patch('socket.socket'):
tun = Tunnel(iface)
Globals.getInstance().set_tunnelInstance(tun)
onTunnelReceive(packet, iface)
assert re.search(r'in onTunnelReceive', caplog.text, re.MULTILINE)
assert re.search(r'Ignoring message we sent', caplog.text, re.MULTILINE)
@pytest.mark.unit
@patch('platform.system')
def test_onTunnelReceive_from_someone_else(mock_platform_system, caplog, iface_with_nodes):
    """onTunnelReceive must process a packet originating from another node."""
iface = iface_with_nodes
iface.myInfo.my_node_num = 2475227164
sys.argv = ['']
Globals.getInstance().set_args(sys.argv)
# 'from' (123) differs from our node number, so it is not ignored.
packet = {'decoded': { 'payload': 'foo'}, 'from': 123}
a_mock = MagicMock()
a_mock.return_value = 'Linux'
mock_platform_system.side_effect = a_mock
with caplog.at_level(logging.DEBUG):
with patch('socket.socket'):
tun = Tunnel(iface)
Globals.getInstance().set_tunnelInstance(tun)
onTunnelReceive(packet, iface)
assert re.search(r'in onTunnelReceive', caplog.text, re.MULTILINE)
@pytest.mark.unitslow
@patch('platform.system')
def test_shouldFilterPacket_random(mock_platform_system, caplog, iface_with_nodes):
    """Test _shouldFilterPacket()"""
iface = iface_with_nodes
iface.noProto = True
# random packet
packet = b'1234567890123456789012345678901234567890'
a_mock = MagicMock()
a_mock.return_value = 'Linux'
mock_platform_system.side_effect = a_mock
with caplog.at_level(logging.DEBUG):
with patch('socket.socket'):
tun = Tunnel(iface)
ignore = tun._shouldFilterPacket(packet)
# Arbitrary bytes match no filter rule, so the packet is forwarded.
assert not ignore
@pytest.mark.unitslow
@patch('platform.system')
def test_shouldFilterPacket_in_blacklist(mock_platform_system, caplog, iface_with_nodes):
    """Test _shouldFilterPacket()"""
iface = iface_with_nodes
iface.noProto = True
# faked IGMP
# Byte at offset 9 is 0x02 — presumably the IPv4 protocol field (IGMP);
# verify against _shouldFilterPacket's parsing.
packet = b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
a_mock = MagicMock()
a_mock.return_value = 'Linux'
mock_platform_system.side_effect = a_mock
with caplog.at_level(logging.DEBUG):
with patch('socket.socket'):
tun = Tunnel(iface)
ignore = tun._shouldFilterPacket(packet)
# A blacklisted protocol must be filtered out.
assert ignore
@pytest.mark.unitslow
@patch('platform.system')
def test_shouldFilterPacket_icmp(mock_platform_system, caplog, iface_with_nodes):
    """Test _shouldFilterPacket()"""
iface = iface_with_nodes
iface.noProto = True
# faked ICMP
# Byte at offset 9 is 0x01 — presumably the IPv4 protocol field (ICMP).
packet = b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
a_mock = MagicMock()
a_mock.return_value = 'Linux'
mock_platform_system.side_effect = a_mock
with caplog.at_level(logging.DEBUG):
with patch('socket.socket'):
tun = Tunnel(iface)
ignore = tun._shouldFilterPacket(packet)
# ICMP is forwarded, and the forwarding decision is logged.
assert re.search(r'forwarding ICMP message', caplog.text, re.MULTILINE)
assert not ignore
@pytest.mark.unit
@patch('platform.system')
def test_shouldFilterPacket_udp(mock_platform_system, caplog, iface_with_nodes):
    """Test _shouldFilterPacket()"""
iface = iface_with_nodes
iface.noProto = True
# faked UDP
# Byte at offset 9 is 0x11 (17) — presumably the IPv4 protocol field (UDP);
# ports are all zero, so nothing is blacklisted.
packet = b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x11\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
a_mock = MagicMock()
a_mock.return_value = 'Linux'
mock_platform_system.side_effect = a_mock
with caplog.at_level(logging.DEBUG):
with patch('socket.socket'):
tun = Tunnel(iface)
ignore = tun._shouldFilterPacket(packet)
assert re.search(r'forwarding udp', caplog.text, re.MULTILINE)
assert not ignore
@pytest.mark.unitslow
@patch('platform.system')
def test_shouldFilterPacket_udp_blacklisted(mock_platform_system, caplog, iface_with_nodes):
    """Test _shouldFilterPacket()"""
iface = iface_with_nodes
iface.noProto = True
# faked UDP
# Protocol byte 0x11 (UDP) with port bytes \x07\x6c (0x076c = 1900) —
# presumably a blacklisted UDP port; verify against the tunnel's blacklist.
packet = b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x11\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\x6c\x07\x6c\x00\x00\x00'
a_mock = MagicMock()
a_mock.return_value = 'Linux'
mock_platform_system.side_effect = a_mock
# Note: custom logging level
# The blacklist decision is logged below DEBUG, at the tunnel's TRACE level.
LOG_TRACE = 5
with caplog.at_level(LOG_TRACE):
with patch('socket.socket'):
tun = Tunnel(iface)
ignore = tun._shouldFilterPacket(packet)
assert re.search(r'ignoring blacklisted UDP', caplog.text, re.MULTILINE)
assert ignore
@pytest.mark.unit
@patch('platform.system')
def test_shouldFilterPacket_tcp(mock_platform_system, caplog, iface_with_nodes):
    """Test _shouldFilterPacket()"""
iface = iface_with_nodes
iface.noProto = True
# faked TCP
# Byte at offset 9 is 0x06 — presumably the IPv4 protocol field (TCP);
# ports are all zero, so nothing is blacklisted.
packet = b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
a_mock = MagicMock()
a_mock.return_value = 'Linux'
mock_platform_system.side_effect = a_mock
with caplog.at_level(logging.DEBUG):
with patch('socket.socket'):
tun = Tunnel(iface)
ignore = tun._shouldFilterPacket(packet)
assert re.search(r'forwarding tcp', caplog.text, re.MULTILINE)
assert not ignore
@pytest.mark.unitslow
@patch('platform.system')
def test_shouldFilterPacket_tcp_blacklisted(mock_platform_system, caplog, iface_with_nodes):
    """Test _shouldFilterPacket()"""
iface = iface_with_nodes
iface.noProto = True
# faked TCP
# Protocol byte 0x06 (TCP) with port bytes \x17\x0c (0x170c = 5900) —
# presumably a blacklisted TCP port; verify against the tunnel's blacklist.
packet = b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x17\x0c\x17\x0c\x00\x00\x00'
a_mock = MagicMock()
a_mock.return_value = 'Linux'
mock_platform_system.side_effect = a_mock
# Note: custom logging level
# The blacklist decision is logged below DEBUG, at the tunnel's TRACE level.
LOG_TRACE = 5
with caplog.at_level(LOG_TRACE):
with patch('socket.socket'):
tun = Tunnel(iface)
ignore = tun._shouldFilterPacket(packet)
assert re.search(r'ignoring blacklisted TCP', caplog.text, re.MULTILINE)
assert ignore
@pytest.mark.unitslow
@patch('platform.system')
def test_ipToNodeId_none(mock_platform_system, caplog, iface_with_nodes):
    """Test _ipToNodeId()"""
iface = iface_with_nodes
iface.noProto = True
a_mock = MagicMock()
a_mock.return_value = 'Linux'
mock_platform_system.side_effect = a_mock
with caplog.at_level(logging.DEBUG):
with patch('socket.socket'):
tun = Tunnel(iface)
# Input that is not a recognizable address must map to no node id.
nodeid = tun._ipToNodeId('something not useful')
assert nodeid is None
@pytest.mark.unitslow
@patch('platform.system')
def test_ipToNodeId_all(mock_platform_system, caplog, iface_with_nodes):
    """Test _ipToNodeId()"""
iface = iface_with_nodes
iface.noProto = True
a_mock = MagicMock()
a_mock.return_value = 'Linux'
mock_platform_system.side_effect = a_mock
with caplog.at_level(logging.DEBUG):
with patch('socket.socket'):
tun = Tunnel(iface)
# The 0xFFFF host suffix maps to the broadcast node id '^all'.
nodeid = tun._ipToNodeId(b'\x00\x00\xff\xff')
assert nodeid == '^all'
| 36.322097
| 124
| 0.694473
| 1,272
| 9,698
| 5.083333
| 0.10456
| 0.113208
| 0.148933
| 0.172595
| 0.882617
| 0.878596
| 0.855243
| 0.837457
| 0.791061
| 0.747448
| 0
| 0.048908
| 0.192514
| 9,698
| 266
| 125
| 36.458647
| 0.776785
| 0.065374
| 0
| 0.748815
| 0
| 0.028436
| 0.156459
| 0.070824
| 0
| 0
| 0
| 0
| 0.113744
| 1
| 0.066351
| false
| 0
| 0.037915
| 0
| 0.104265
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2101d3227d65f9561ecb469bd08cba0d0c8ab7c7
| 361
|
py
|
Python
|
tests/internal/instance_type/test_instance_type_u_12_auto.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | null | null | null |
tests/internal/instance_type/test_instance_type_u_12_auto.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | null | null | null |
tests/internal/instance_type/test_instance_type_u_12_auto.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | null | null | null |
# Testing module instance_type.u_12
import pytest
import ec2_compare.internal.instance_type.u_12
def test_get_internal_data_instance_type_u_12_get_instances_list():
    """The generated u_12 module must list at least one instance type."""
    instances = ec2_compare.internal.instance_type.u_12.get_instances_list()
    assert len(instances) > 0
def test_get_internal_data_instance_type_u_12_get():
    """The module-level `get` collection must not be empty."""
    data = ec2_compare.internal.instance_type.u_12.get
    assert len(data) > 0
| 36.1
| 78
| 0.853186
| 62
| 361
| 4.467742
| 0.322581
| 0.259928
| 0.281588
| 0.32491
| 0.826715
| 0.826715
| 0.826715
| 0.613718
| 0.613718
| 0.613718
| 0
| 0.050746
| 0.072022
| 361
| 9
| 79
| 40.111111
| 0.776119
| 0.091413
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 12
|
dcf5ccc0e25e317943152106f14e933c0d1b44d2
| 279
|
py
|
Python
|
sympy/solvers/ode/__init__.py
|
75072107/sympy
|
08906f0725c554ca5412631f5ad20aee6fdbfcb2
|
[
"BSD-3-Clause"
] | 1
|
2020-02-12T16:17:59.000Z
|
2020-02-12T16:17:59.000Z
|
sympy/solvers/ode/__init__.py
|
75072107/sympy
|
08906f0725c554ca5412631f5ad20aee6fdbfcb2
|
[
"BSD-3-Clause"
] | null | null | null |
sympy/solvers/ode/__init__.py
|
75072107/sympy
|
08906f0725c554ca5412631f5ad20aee6fdbfcb2
|
[
"BSD-3-Clause"
] | null | null | null |
# Re-export the public ODE-solving API from the implementation module so that
# callers can write e.g. `from sympy.solvers.ode import dsolve`.
from .ode import (allhints, checkinfsol, checkodesol, classify_ode,
constantsimp, dsolve, homogeneous_order, infinitesimals)

# Names exposed by `from sympy.solvers.ode import *`; kept in sync with the
# import above.
__all__ = [
'allhints', 'checkinfsol', 'checkodesol', 'classify_ode', 'constantsimp',
'dsolve', 'homogeneous_order', 'infinitesimals',
]
| 34.875
| 77
| 0.724014
| 24
| 279
| 8.083333
| 0.541667
| 0.195876
| 0.309278
| 0.391753
| 0.917526
| 0.917526
| 0.917526
| 0.917526
| 0.917526
| 0.917526
| 0
| 0
| 0.143369
| 279
| 7
| 78
| 39.857143
| 0.811715
| 0
| 0
| 0
| 0
| 0
| 0.326165
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 0.166667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
0d386ffb194b907196387e4174b6a21b0120d2f0
| 251,553
|
py
|
Python
|
unit_tests/Corekaburra_test.py
|
milnus/Corekaburra
|
ac96c30f795d7e1b7132811d2e4a484d0ef5a6fc
|
[
"MIT"
] | 1
|
2022-01-27T02:45:22.000Z
|
2022-01-27T02:45:22.000Z
|
unit_tests/Corekaburra_test.py
|
milnus/Corekaburra
|
ac96c30f795d7e1b7132811d2e4a484d0ef5a6fc
|
[
"MIT"
] | null | null | null |
unit_tests/Corekaburra_test.py
|
milnus/Corekaburra
|
ac96c30f795d7e1b7132811d2e4a484d0ef5a6fc
|
[
"MIT"
] | null | null | null |
'''
Unit tests for Corekaburra.
Usage: python -m unittest -v Corekaburra_test
'''
# import
import unittest
import os
from shutil import copyfile
import logging
from networkx import number_connected_components, connected_components
# pylint: disable=no-name-in-module
# import Corekaburra functions
from Corekaburra import exit_with_error
from Corekaburra import read_complete_genome_file
from Corekaburra import check_inputs
from Corekaburra import parse_gene_presence_absence
from Corekaburra import gff_parser
from Corekaburra import merge_dicts
from Corekaburra import consesus_core_genome
from Corekaburra import summary_table
from Corekaburra import output_writer_functions
from Corekaburra import correct_gffs
# Move to the folder holding the mock input files. Try the GitHub CI checkout
# layout first, then fall back to the layout of a locally pulled repository.
try:
os.chdir('/Corekaburra/unit_tests/unit_test_data/')
except FileNotFoundError:
os.chdir('unit_test_data/')
class TestExitWithError(unittest.TestCase):
""" Test for the function carrying out a nice exit """
@classmethod
def setUpClass(cls):
# Shared logger handed to exit_with_error; INFO level so messages flow.
cls.logger = logging.getLogger('test_logger.log')
cls.logger.setLevel(logging.INFO)
def test_exit_w_tmp_folder_deletion(self):
''' Test the exit function is able to remove the temporary folder '''
# copy the placeholder tmp folder to replace it afterwards
tmp_folder = 'TestExitWithError/tmp_folder'
tmp_folder_copy = 'TestExitWithError/tmp_folder_copy'
os.mkdir(tmp_folder_copy)
tmp_files = os.listdir(tmp_folder)
for file in tmp_files:
copyfile(os.path.join(tmp_folder, file), os.path.join(tmp_folder_copy, file))
# exit_with_error must remove tmp_folder and terminate via SystemExit.
with self.assertRaises(SystemExit):
exit_with_error.exit_with_error(exit_status=2, message='test msg', logger=self.logger, tmp_folder=tmp_folder)
# Restore the placeholder folder so subsequent runs start clean.
os.rename(tmp_folder_copy, tmp_folder)
class TestCutOffViolations(unittest.TestCase):
""" Test for the function that examines the cutoffs given for core and low-frequency genes"""
@classmethod
def setUpClass(cls):
# Shared logger handed to check_cutoffs.
cls.logger = logging.getLogger('test_logger.log')
cls.logger.setLevel(logging.INFO)
def test_low_below_range(self):
# Low-frequency cutoff below 0 is invalid.
with self.assertRaises(SystemExit):
check_inputs.check_cutoffs(-0.1, 1, self.logger)
def test_core_above_range(self):
# Core cutoff above 1 is invalid.
with self.assertRaises(SystemExit):
check_inputs.check_cutoffs(0.05, 1.1, self.logger)
def test_low_larger_than_core(self):
# Low-frequency cutoff must not exceed the core cutoff.
with self.assertRaises(SystemExit):
check_inputs.check_cutoffs(0.6, 0.4, self.logger)
class TestParsingCompleteGenomes(unittest.TestCase):
""" Test for the passing of input file containing names of complete genome and checking their presence in the pan-genome """
@classmethod
def setUpClass(cls):
cls.logger = logging.getLogger('test_logger.log')
cls.logger.setLevel(logging.INFO)
def test_all_files_found(self):
# Mix of absolute/relative paths with and without .gff/.gz suffixes;
# all five complete genomes are present among the gff files.
gff_files = ['/path/to/complete_genome_1.gff',
'/path/complete_genome_2.gff.gz',
'complete_genome_3.gff.gz',
'complete_genome_4.gff',
'complete_genome_5',
'dummy_index_1',
'dummy_index_2']
complete_genome_file = 'TestParsingCompleteGenomes/complete_genomes_file.txt'
# Expected: bare genome names, stripped of directories and extensions.
expected_return = ['complete_genome_1',
'complete_genome_2',
'complete_genome_3',
'complete_genome_4',
'complete_genome_5']
return_object = read_complete_genome_file.parse_complete_genome_file(complete_genome_file, gff_files, self.logger)
self.assertEqual(return_object, expected_return)
def test_correct_one_files_not_found(self):
# complete_genome_1 is listed in the file but missing from gff_files,
# which must be treated as a fatal error.
gff_files = ['/path/complete_genome_2.gff.gz',
'complete_genome_3.gff.gz',
'complete_genome_4.gff',
'complete_genome_5',
'dummy_index_1',
'dummy_index_2']
complete_genome_file = 'TestParsingCompleteGenomes/complete_genomes_file.txt'
with self.assertRaises(SystemExit):
read_complete_genome_file.parse_complete_genome_file(complete_genome_file,
gff_files, self.logger)
class TestPangenomeSourceProgram(unittest.TestCase):
""" Test of the function that determines the program from which the pan-genome originated """
@classmethod
def setUpClass(cls):
cls.logger = logging.getLogger('test_logger.log')
cls.logger.setLevel(logging.INFO)
def test_roary_input(self):
# A Roary folder is identified by gene_presence_absence.csv.
input_folder_path = 'TestPangenomeSourceProgram/Mock_roary'
return_program, return_path = check_inputs.define_pangenome_program(input_folder_path, self.logger)
self.assertEqual("Roary", return_program)
self.assertEqual(input_folder_path + '/gene_presence_absence.csv', return_path)
def test_panaroo_input(self):
# A Panaroo folder is identified by gene_presence_absence_roary.csv.
input_folder_path = 'TestPangenomeSourceProgram/Mock_panaroo'
return_program, return_path = check_inputs.define_pangenome_program(input_folder_path, self.logger)
self.assertEqual("Panaroo", return_program)
self.assertEqual(input_folder_path + '/gene_presence_absence_roary.csv', return_path)
def test_minimal_panaroo_input(self):
# A minimal Panaroo output folder must still be recognized as Panaroo.
input_folder_path = 'TestPangenomeSourceProgram/Mock_minimal_panaroo'
return_program, return_path = check_inputs.define_pangenome_program(input_folder_path, self.logger)
self.assertEqual("Panaroo", return_program)
self.assertEqual(input_folder_path + '/gene_presence_absence_roary.csv', return_path)
# def test_pirate_input(self): TODO - Make Corekaburra take Pirate input!
# pass
# input_folder_path = 'TestPangenomeSourceProgram/Mock_pirate'
#
# return_program, return_path = check_inputs.define_pangenome_program(input_folder_path)
#
# self.assertEqual("Pirate", return_program)
def test_unknown_input(self):
# A folder matching no known pan-genome program must abort.
input_folder_path = 'TestPangenomeSourceProgram/Mock_unknwon'
with self.assertRaises(SystemExit):
check_inputs.define_pangenome_program(input_folder_path, self.logger)
class TestPresenceOfGenedataFile(unittest.TestCase):
""" Test the function that ensures the presence of the Gene_data.csv file produced by Panaroo """
@classmethod
def setUpClass(cls):
cls.logger = logging.getLogger('test_logger.log')
cls.logger.setLevel(logging.INFO)
def test_Genedata_File_present(self):
# The file exists; its path is returned.
input_folder_path = 'TestPresenceOfGenedataFile/present'
return_path = check_inputs.check_gene_data(input_folder_path, self.logger)
self.assertEqual(return_path, input_folder_path +'/gene_data.csv')
def test_Genedata_File_absent(self):
# A missing gene_data.csv must abort the run.
input_folder_path = 'TestPresenceOfGenedataFile/absent'
with self.assertRaises(SystemExit):
check_inputs.check_gene_data(input_folder_path, self.logger)
class TestPresenceOfGffsInPresAbsFile(unittest.TestCase):
    # Fix: this string was previously a stray no-op statement placed after
    # setUpClass; as a class docstring it now actually sets __doc__.
    """ Test the function that ensures all gffs given as input are included in the pan-genome provided """

    @classmethod
    def setUpClass(cls):
        # Shared logger handed to check_gff_in_pan.
        cls.logger = logging.getLogger('test_logger.log')
        cls.logger.setLevel(logging.INFO)

    # Test pairing of all files in pan genome
    def test_input_gff_pres_abs_pairing_all_gffs(self):
        input_pres_abs = 'TestPresenceOfGffsInPresAbsFile/gene_presence_absence_roary.csv'
        input_file_list = ['Silas_the_Salmonella', 'Christina_the_Streptococcus', 'Ajwa_the_Shigella']
        return_bool = check_inputs.check_gff_in_pan(input_file_list, input_pres_abs, self.logger)
        self.assertEqual(return_bool, True)

    # Test pairing of some files in pan genome - Warning
    def test_input_gff_pres_abs_pairing_some(self):
        input_pres_abs = 'TestPresenceOfGffsInPresAbsFile/gene_presence_absence_roary.csv'
        input_file_list = ['Silas_the_Salmonella.gff', 'Christina_the_Streptococcus.gff']
        return_bool = check_inputs.check_gff_in_pan(input_file_list, input_pres_abs, self.logger)
        self.assertEqual(return_bool, True)

    # Test when given a file not in pan genome among others that are in the pan genome
    def test_input_gff_pres_abs_file_not_in_pan(self):
        input_pres_abs = 'TestPresenceOfGffsInPresAbsFile/gene_presence_absence_roary.csv'
        input_file_list = ['not_found.gff', 'Silas_the_Salmonella.gff', 'Christina_the_Streptococcus.gff']
        with self.assertRaises(SystemExit):
            check_inputs.check_gff_in_pan(input_file_list, input_pres_abs, self.logger)

    def test_input_gff_pres_abs_some_file_not_in_pan(self):
        # No input gff appears in the pan genome at all; must abort.
        input_pres_abs = 'TestPresenceOfGffsInPresAbsFile/gene_presence_absence_roary.csv'
        input_file_list = ['not_found.gff', 'also_not_found.gff', 'definitely_not_found.gff']
        with self.assertRaises(SystemExit):
            check_inputs.check_gff_in_pan(input_file_list, input_pres_abs, self.logger)
class TestAddingGeneToDict(unittest.TestCase):
"""
Tests of the function that adds a gene to the dict used to holds that class (core, accessory of low-frequency)
"""
def test_adding_gene(self):
# First gene added for a genome that has an empty mapping.
main_dict = {'Test_genome': {}}
gene = 'Test_gene_from_genome'
pan_gene_name = 'Test_pan_gene'
genome = 'Test_genome'
expected_return = {'Test_genome': {'Test_gene_from_genome': 'Test_pan_gene'}}
return_dict = parse_gene_presence_absence.add_gene_to_dict(main_dict, gene, pan_gene_name, genome)
self.assertEqual(expected_return, return_dict)
def test_adding_additional_gene(self):
# Adding a second gene must preserve the genome's existing entry.
main_dict = {'Test_genome': {'Test_gene_from_genome': 'Test_pan_gene'}}
gene = 'Test_gene_from_genome_2'
pan_gene_name = 'Test_pan_gene_2'
genome = 'Test_genome'
expected_return = {'Test_genome': {'Test_gene_from_genome': 'Test_pan_gene',
'Test_gene_from_genome_2': 'Test_pan_gene_2'}}
return_dict = parse_gene_presence_absence.add_gene_to_dict(main_dict, gene, pan_gene_name, genome)
self.assertEqual(expected_return, return_dict)
class TestCheckingFragmentedGenes(unittest.TestCase):
"""
Test of the function that examines the placement of a potential core gene's placement, if it is fragmented in at least one genome.
"""
@classmethod
def setUpClass(cls):
cls.logger = logging.getLogger('test_logger.log')
cls.logger.setLevel(logging.INFO)
def tearDown(self):
""" Class to remove created database files of gff files in tmp-folder"""
for file in os.listdir('test_tmp_folder'):
if "_db" in file:
db_path = os.path.join('test_tmp_folder', file)
os.remove(db_path)
def test_fragmented_gene_true(self):
""" Gene is fragmented but found next to each other with nothing in between """
fragments_info = [['Silas_the_Salmonella_tag-1-2.1;Silas_the_Salmonella_tag-1-2.2', 'Silas_the_Salmonella']]
input_gffs =['TestParsingGenePresenceAbsenceFile/Silas_the_Salmonella.gff',
'TestParsingGenePresenceAbsenceFile/Christina_the_Streptococcus.gff',
'TestParsingGenePresenceAbsenceFile/Ajwa_the_Shigella.gff',
'TestParsingGenePresenceAbsenceFile/Ajwa_the_Legionella.gff',
'TestParsingGenePresenceAbsenceFile/Cari_the_Listeria.gff',
'TestParsingGenePresenceAbsenceFile/Aman_the_Streptococcus.gff',
'TestParsingGenePresenceAbsenceFile/Zion_the_Streptococcus.gff',
'TestParsingGenePresenceAbsenceFile/Dina_the_Shigella.gff',
'TestParsingGenePresenceAbsenceFile/Silas_the_Legionella.gff',
'TestParsingGenePresenceAbsenceFile/Lilly_the_Shigella.gff']
tmp_folder_path = 'test_tmp_folder'
gene_data_file = {}
corrected_dir = ''
expected_return = [True]
return_bool = parse_gene_presence_absence.check_fragmented_gene(fragments_info, input_gffs, tmp_folder_path,
gene_data_file, corrected_dir, self.logger)
self.assertEqual(expected_return, return_bool)
def test_fragmented_gene_fasle(self):
""" Gene is fragmented but found next to each other with another gene in between """
fragments_info = [['Silas_the_Salmonella_tag-1-5.1;Silas_the_Salmonella_tag-1-5.2', 'Silas_the_Salmonella']]
input_gffs = ['TestParsingGenePresenceAbsenceFile/Silas_the_Salmonella.gff',
'TestParsingGenePresenceAbsenceFile/Christina_the_Streptococcus.gff',
'TestParsingGenePresenceAbsenceFile/Ajwa_the_Shigella.gff',
'TestParsingGenePresenceAbsenceFile/Ajwa_the_Legionella.gff',
'TestParsingGenePresenceAbsenceFile/Cari_the_Listeria.gff',
'TestParsingGenePresenceAbsenceFile/Aman_the_Streptococcus.gff',
'TestParsingGenePresenceAbsenceFile/Zion_the_Streptococcus.gff',
'TestParsingGenePresenceAbsenceFile/Dina_the_Shigella.gff',
'TestParsingGenePresenceAbsenceFile/Silas_the_Legionella.gff',
'TestParsingGenePresenceAbsenceFile/Lilly_the_Shigella.gff']
tmp_folder_path = 'test_tmp_folder'
gene_data_file = {}
corrected_dir = ''
expected_return = [False]
return_bool = parse_gene_presence_absence.check_fragmented_gene(fragments_info, input_gffs, tmp_folder_path,
gene_data_file, corrected_dir, self.logger)
self.assertEqual(expected_return, return_bool)
def test_fragmented_gene_mutiple_genes_fasle(self):
""" Two genes fragmented with one having nothing and the other having something in between fragments """
fragment_info = [['Silas_the_Salmonella_tag-1-2.1;Silas_the_Salmonella_tag-1-2.2', 'Silas_the_Salmonella'],
['Silas_the_Salmonella_tag-1-5.1;Silas_the_Salmonella_tag-1-5.2', 'Silas_the_Salmonella']]
input_gffs = ['TestParsingGenePresenceAbsenceFile/Silas_the_Salmonella.gff',
'TestParsingGenePresenceAbsenceFile/Christina_the_Streptococcus.gff',
'TestParsingGenePresenceAbsenceFile/Ajwa_the_Shigella.gff',
'TestParsingGenePresenceAbsenceFile/Ajwa_the_Legionella.gff',
'TestParsingGenePresenceAbsenceFile/Cari_the_Listeria.gff',
'TestParsingGenePresenceAbsenceFile/Aman_the_Streptococcus.gff',
'TestParsingGenePresenceAbsenceFile/Zion_the_Streptococcus.gff',
'TestParsingGenePresenceAbsenceFile/Dina_the_Shigella.gff',
'TestParsingGenePresenceAbsenceFile/Silas_the_Legionella.gff',
'TestParsingGenePresenceAbsenceFile/Lilly_the_Shigella.gff']
tmp_folder_path = 'test_tmp_folder'
gene_data_file = {}
corrected_dir = ''
expected_return = [True, False]
return_bool = parse_gene_presence_absence.check_fragmented_gene(fragment_info, input_gffs, tmp_folder_path,
gene_data_file, corrected_dir, self.logger)
self.assertEqual(expected_return, return_bool)
def test_fragments_on_separate_contigs(self):
""" One gene fragmented with parts on separate contigs """
fragments_info = [['Silas_the_Salmonella_tag-1-2.1;Silas_the_Salmonella_tag-1-2.2', 'Silas_the_Salmonella'],
['Silas_the_Salmonella_tag-1-5.1;Silas_the_Salmonella_tag-1-5.2', 'Silas_the_Salmonella']]
input_gffs = ['TestCheckingFragmentedGenes/Silas_the_Salmonella.gff',
'TestCheckingFragmentedGenes/Zion_the_Streptococcus.gff',
'TestCheckingFragmentedGenes/Silas_the_Legionella.gff',
'TestCheckingFragmentedGenes/Lilly_the_Shigella.gff']
tmp_folder_path = 'test_tmp_folder'
gene_data_file = {}
corrected_dir = ''
expected_return = [False, False]
return_bool = parse_gene_presence_absence.check_fragmented_gene(fragments_info, input_gffs, tmp_folder_path,
gene_data_file, corrected_dir, self.logger)
self.assertEqual(expected_return, return_bool)
class TestParsingGenePresenceAbsenceFile(unittest.TestCase):
"""
Tests for the function that passes the gene presence absence table from pan-genome program
"""
@classmethod
def setUpClass(cls):
cls.logger = logging.getLogger('test_logger.log')
cls.logger.setLevel(logging.INFO)
def tearDown(self):
try:
os.remove('TestParsingGenePresenceAbsenceFile/Silas_the_Salmonella_w_refound_db')
except FileNotFoundError:
pass
try:
for file in os.listdir('TestParsingGenePresenceAbsenceFile/Corrected_gffs/'):
if '.gff' in file:
os.remove(os.path.join('TestParsingGenePresenceAbsenceFile/Corrected_gffs/', file))
except FileNotFoundError:
pass
def test_parsing_w_100_presence(self):
file_name = 'TestParsingGenePresenceAbsenceFile/gene_presence_absence_roary.csv'
core_gene_presence = 1
low_freq_gene = 0.1
source_program = 'Panaroo'
input_gffs = ['TestParsingGenePresenceAbsenceFile/Silas_the_Salmonella.gff',
'TestParsingGenePresenceAbsenceFile/Christina_the_Streptococcus.gff',
'TestParsingGenePresenceAbsenceFile/Ajwa_the_Shigella.gff',
'TestParsingGenePresenceAbsenceFile/Ajwa_the_Legionella.gff',
'TestParsingGenePresenceAbsenceFile/Cari_the_Listeria.gff',
'TestParsingGenePresenceAbsenceFile/Aman_the_Streptococcus.gff',
'TestParsingGenePresenceAbsenceFile/Zion_the_Streptococcus.gff',
'TestParsingGenePresenceAbsenceFile/Dina_the_Shigella.gff',
'TestParsingGenePresenceAbsenceFile/Silas_the_Legionella.gff',
'TestParsingGenePresenceAbsenceFile/Lilly_the_Shigella.gff']
tmp_folder_path = 'TestParsingGenePresenceAbsenceFile/'
gene_data_file = {}
corrected_dir = ''
expected_core_gene_dict = {'Silas_the_Salmonella': {'Silas_the_Salmonella_tag-1-1': "A",
'Silas_the_Salmonella_tag-1-2.1': "B",
'Silas_the_Salmonella_tag-1-2.2': "B"},
'Christina_the_Streptococcus': {'Christina_the_Streptococcus_tag-2-1': "A",
'Christina_the_Streptococcus_tag-2-2': "B"},
'Ajwa_the_Shigella': {'Ajwa_the_Shigella_tag-3-1': "A",
'Ajwa_the_Shigella_tag-3-2': "B"},
'Ajwa_the_Legionella': {'Ajwa_the_Legionella_tag-4-1': "A",
'Ajwa_the_Legionella_tag-4-2': "B"},
'Cari_the_Listeria': {'Cari_the_Listeria_tag-5-1': "A",
'Cari_the_Listeria_tag-5-2': "B"},
'Aman_the_Streptococcus': {'Aman_the_Streptococcus_tag-6-1': "A",
'Aman_the_Streptococcus_tag-6-2': "B"},
'Zion_the_Streptococcus': {'Zion_the_Streptococcus_tag-7-1': "A",
'Zion_the_Streptococcus_tag-7-2': "B"},
'Dina_the_Shigella': {'Dina_the_Shigella_tag-8-1': "A",
'Dina_the_Shigella_tag-8-2': "B"},
'Silas_the_Legionella': {'Silas_the_Legionella_tag-9-1': "A",
'Silas_the_Legionella_tag-9-2': "B"},
'Lilly_the_Shigella': {'Lilly_the_Shigella_tag-10-1': "A",
'Lilly_the_Shigella_tag-10-2': "B"}}
expected_low_freq_gene_dict = {'Silas_the_Salmonella': {'Silas_the_Salmonella_tag-1-7': "G"},
'Christina_the_Streptococcus': {},
'Ajwa_the_Shigella': {},
'Ajwa_the_Legionella': {},
'Cari_the_Listeria': {},
'Aman_the_Streptococcus': {},
'Zion_the_Streptococcus': {},
'Dina_the_Shigella': {},
'Silas_the_Legionella': {},
'Lilly_the_Shigella': {'Lilly_the_Shigella_tag-10-6': "F"}}
expected_acc_gene_dict = {'Silas_the_Salmonella': {'Silas_the_Salmonella_tag-1-3': 'C',
'Silas_the_Salmonella_tag-1-4.1': 'D',
'Silas_the_Salmonella_tag-1-4.2': 'D',
'Silas_the_Salmonella_tag-1-5.1': 'E',
'Silas_the_Salmonella_tag-1-5.2': 'E'},
'Christina_the_Streptococcus': {'Christina_the_Streptococcus_tag-2-3': "C",
'Christina_the_Streptococcus_tag-2-4': "D",
'Christina_the_Streptococcus_tag-2-5': "E"},
'Ajwa_the_Shigella': {"Ajwa_the_Shigella_tag-3-3": "C",
"Ajwa_the_Shigella_tag-3-4": "D",
"Ajwa_the_Shigella_tag-3-5": "E"},
'Ajwa_the_Legionella': {'Ajwa_the_Legionella_tag-4-3': "C",
'Ajwa_the_Legionella_tag-4-4': "D",
'Ajwa_the_Legionella_tag-4-5': "E"},
'Cari_the_Listeria': {"Cari_the_Listeria_tag-5-3": "C",
"Cari_the_Listeria_tag-5-4": "D",
"Cari_the_Listeria_tag-5-5": "E"},
'Aman_the_Streptococcus': {"Aman_the_Streptococcus_tag-6-3": "C",
"Aman_the_Streptococcus_tag-6-4": "D",
"Aman_the_Streptococcus_tag-6-5": "E"},
'Zion_the_Streptococcus': {"Zion_the_Streptococcus_tag-7-3": "C",
"Zion_the_Streptococcus_tag-7-4": "D",
"Zion_the_Streptococcus_tag-7-5": "E"},
'Dina_the_Shigella': {"Dina_the_Shigella_tag-8-3": "C",
"Dina_the_Shigella_tag-8-4": "D",
"Dina_the_Shigella_tag-8-5": "E"},
'Silas_the_Legionella': {"Silas_the_Legionella_tag-9-3": "C",
"Silas_the_Legionella_tag-9-4": "D",
"Silas_the_Legionella_tag-9-5": "E"},
'Lilly_the_Shigella': {'Lilly_the_Shigella_tag-10-5': "E"}}
core_gene_dict, low_freq_gene_dict, \
acc_gene_dict = \
parse_gene_presence_absence.read_gene_presence_absence(
file_name, core_gene_presence,
low_freq_gene, source_program,
input_gffs, tmp_folder_path, gene_data_file, corrected_dir, self.logger)
self.assertEqual(expected_core_gene_dict, core_gene_dict)
self.assertEqual(expected_low_freq_gene_dict, low_freq_gene_dict)
self.assertEqual(expected_acc_gene_dict, acc_gene_dict)
def test_parsing_w_100_presence_roary(self):
file_name = 'TestParsingGenePresenceAbsenceFile/gene_presence_absence.csv'
core_gene_presence = 1
low_freq_gene = 0.1
source_program = 'Roary'
input_gffs = ['TestParsingGenePresenceAbsenceFile/Silas_the_Salmonella.gff',
'TestParsingGenePresenceAbsenceFile/Christina_the_Streptococcus.gff',
'TestParsingGenePresenceAbsenceFile/Ajwa_the_Shigella.gff',
'TestParsingGenePresenceAbsenceFile/Ajwa_the_Legionella.gff',
'TestParsingGenePresenceAbsenceFile/Cari_the_Listeria.gff',
'TestParsingGenePresenceAbsenceFile/Aman_the_Streptococcus.gff',
'TestParsingGenePresenceAbsenceFile/Zion_the_Streptococcus.gff',
'TestParsingGenePresenceAbsenceFile/Dina_the_Shigella.gff',
'TestParsingGenePresenceAbsenceFile/Silas_the_Legionella.gff',
'TestParsingGenePresenceAbsenceFile/Lilly_the_Shigella.gff']
tmp_folder_path = 'TestParsingGenePresenceAbsenceFile/'
gene_data_file = {}
corrected_dir = ''
core_gene_dict, low_freq_gene_dict, \
acc_gene_dict = \
parse_gene_presence_absence.read_gene_presence_absence(
file_name, core_gene_presence,
low_freq_gene, source_program,
input_gffs, tmp_folder_path, gene_data_file, corrected_dir, self.logger)
expected_core_gene_dict = {'Silas_the_Salmonella': {'Silas_the_Salmonella_tag-1-1': "A",
'Silas_the_Salmonella_tag-1-2.1': "B",
'Silas_the_Salmonella_tag-1-2.2': "B"},
'Christina_the_Streptococcus': {'Christina_the_Streptococcus_tag-2-1': "A",
'Christina_the_Streptococcus_tag-2-2': "B"},
'Ajwa_the_Shigella': {'Ajwa_the_Shigella_tag-3-1': "A",
'Ajwa_the_Shigella_tag-3-2': "B"},
'Ajwa_the_Legionella': {'Ajwa_the_Legionella_tag-4-1': "A",
'Ajwa_the_Legionella_tag-4-2': "B"},
'Cari_the_Listeria': {'Cari_the_Listeria_tag-5-1': "A",
'Cari_the_Listeria_tag-5-2': "B"},
'Aman_the_Streptococcus': {'Aman_the_Streptococcus_tag-6-1': "A",
'Aman_the_Streptococcus_tag-6-2': "B"},
'Zion_the_Streptococcus': {'Zion_the_Streptococcus_tag-7-1': "A",
'Zion_the_Streptococcus_tag-7-2': "B"},
'Dina_the_Shigella': {'Dina_the_Shigella_tag-8-1': "A",
'Dina_the_Shigella_tag-8-2': "B"},
'Silas_the_Legionella': {'Silas_the_Legionella_tag-9-1': "A",
'Silas_the_Legionella_tag-9-2': "B"},
'Lilly_the_Shigella': {'Lilly_the_Shigella_tag-10-1': "A",
'Lilly_the_Shigella_tag-10-2': "B"}}
expected_low_freq_gene_dict = {'Silas_the_Salmonella': {'Silas_the_Salmonella_tag-1-7': "G"},
'Christina_the_Streptococcus': {},
'Ajwa_the_Shigella': {},
'Ajwa_the_Legionella': {},
'Cari_the_Listeria': {},
'Aman_the_Streptococcus': {},
'Zion_the_Streptococcus': {},
'Dina_the_Shigella': {},
'Silas_the_Legionella': {},
'Lilly_the_Shigella': {'Lilly_the_Shigella_tag-10-6': "F"}}
expected_acc_gene_dict = {'Silas_the_Salmonella': {'Silas_the_Salmonella_tag-1-3': 'C',
'Silas_the_Salmonella_tag-1-4.1': 'D',
'Silas_the_Salmonella_tag-1-4.2': 'D',
'Silas_the_Salmonella_tag-1-5.1': 'E',
'Silas_the_Salmonella_tag-1-5.2': 'E'},
'Christina_the_Streptococcus': {'Christina_the_Streptococcus_tag-2-3': "C",
'Christina_the_Streptococcus_tag-2-4': "D",
'Christina_the_Streptococcus_tag-2-5': "E"},
'Ajwa_the_Shigella': {"Ajwa_the_Shigella_tag-3-3": "C",
"Ajwa_the_Shigella_tag-3-4": "D",
"Ajwa_the_Shigella_tag-3-5": "E"},
'Ajwa_the_Legionella': {'Ajwa_the_Legionella_tag-4-3': "C",
'Ajwa_the_Legionella_tag-4-4': "D",
'Ajwa_the_Legionella_tag-4-5': "E"},
'Cari_the_Listeria': {"Cari_the_Listeria_tag-5-3": "C",
"Cari_the_Listeria_tag-5-4": "D",
"Cari_the_Listeria_tag-5-5": "E"},
'Aman_the_Streptococcus': {"Aman_the_Streptococcus_tag-6-3": "C",
"Aman_the_Streptococcus_tag-6-4": "D",
"Aman_the_Streptococcus_tag-6-5": "E"},
'Zion_the_Streptococcus': {"Zion_the_Streptococcus_tag-7-3": "C",
"Zion_the_Streptococcus_tag-7-4": "D",
"Zion_the_Streptococcus_tag-7-5": "E"},
'Dina_the_Shigella': {"Dina_the_Shigella_tag-8-3": "C",
"Dina_the_Shigella_tag-8-4": "D",
"Dina_the_Shigella_tag-8-5": "E"},
'Silas_the_Legionella': {"Silas_the_Legionella_tag-9-3": "C",
"Silas_the_Legionella_tag-9-4": "D",
"Silas_the_Legionella_tag-9-5": "E"},
'Lilly_the_Shigella': {'Lilly_the_Shigella_tag-10-5': "E"}}
self.assertEqual(expected_core_gene_dict, core_gene_dict)
self.assertEqual(expected_low_freq_gene_dict, low_freq_gene_dict)
self.assertEqual(expected_acc_gene_dict, acc_gene_dict)
def test_parsing_w_90_presence(self):
file_name = 'TestParsingGenePresenceAbsenceFile/gene_presence_absence_roary.csv'
core_gene_presence = 0.9
low_freq_gene = 0.1
source_program = 'Panaroo'
input_gffs = ['TestParsingGenePresenceAbsenceFile/Silas_the_Salmonella.gff',
'TestParsingGenePresenceAbsenceFile/Christina_the_Streptococcus.gff',
'TestParsingGenePresenceAbsenceFile/Ajwa_the_Shigella.gff',
'TestParsingGenePresenceAbsenceFile/Ajwa_the_Legionella.gff',
'TestParsingGenePresenceAbsenceFile/Cari_the_Listeria.gff',
'TestParsingGenePresenceAbsenceFile/Aman_the_Streptococcus.gff',
'TestParsingGenePresenceAbsenceFile/Zion_the_Streptococcus.gff',
'TestParsingGenePresenceAbsenceFile/Dina_the_Shigella.gff',
'TestParsingGenePresenceAbsenceFile/Silas_the_Legionella.gff',
'TestParsingGenePresenceAbsenceFile/Lilly_the_Shigella.gff']
tmp_folder_path = 'TestParsingGenePresenceAbsenceFile/'
gene_data_file = {}
corrected_dir = ''
core_gene_dict, low_freq_gene_dict, \
acc_gene_dict = \
parse_gene_presence_absence.read_gene_presence_absence(
file_name, core_gene_presence,
low_freq_gene, source_program,
input_gffs, tmp_folder_path, gene_data_file, corrected_dir, self.logger)
expected_core_gene_dict = {'Silas_the_Salmonella': {'Silas_the_Salmonella_tag-1-1': "A",
'Silas_the_Salmonella_tag-1-2.1': "B",
'Silas_the_Salmonella_tag-1-2.2': "B",
'Silas_the_Salmonella_tag-1-3': 'C',
'Silas_the_Salmonella_tag-1-4.1': 'D',
'Silas_the_Salmonella_tag-1-4.2': 'D',},
'Christina_the_Streptococcus': {'Christina_the_Streptococcus_tag-2-1': "A",
'Christina_the_Streptococcus_tag-2-2': "B",
'Christina_the_Streptococcus_tag-2-3': "C",
'Christina_the_Streptococcus_tag-2-4': "D"},
'Ajwa_the_Shigella': {'Ajwa_the_Shigella_tag-3-1': "A",
'Ajwa_the_Shigella_tag-3-2': "B",
"Ajwa_the_Shigella_tag-3-3": "C",
"Ajwa_the_Shigella_tag-3-4": "D"},
'Ajwa_the_Legionella': {'Ajwa_the_Legionella_tag-4-1': "A",
'Ajwa_the_Legionella_tag-4-2': "B",
'Ajwa_the_Legionella_tag-4-3': "C",
'Ajwa_the_Legionella_tag-4-4': "D"},
'Cari_the_Listeria': {"Cari_the_Listeria_tag-5-3": "C",
"Cari_the_Listeria_tag-5-4": "D",
'Cari_the_Listeria_tag-5-1': "A",
'Cari_the_Listeria_tag-5-2': "B"},
'Aman_the_Streptococcus': {'Aman_the_Streptococcus_tag-6-1': "A",
'Aman_the_Streptococcus_tag-6-2': "B",
"Aman_the_Streptococcus_tag-6-3": "C",
"Aman_the_Streptococcus_tag-6-4": "D"},
'Zion_the_Streptococcus': {"Zion_the_Streptococcus_tag-7-3": "C",
"Zion_the_Streptococcus_tag-7-4": "D",
'Zion_the_Streptococcus_tag-7-1': "A",
'Zion_the_Streptococcus_tag-7-2': "B"},
'Dina_the_Shigella': {"Dina_the_Shigella_tag-8-3": "C",
"Dina_the_Shigella_tag-8-4": "D",
'Dina_the_Shigella_tag-8-1': "A",
'Dina_the_Shigella_tag-8-2': "B"},
'Silas_the_Legionella': {"Silas_the_Legionella_tag-9-3": "C",
"Silas_the_Legionella_tag-9-4": "D",
'Silas_the_Legionella_tag-9-1': "A",
'Silas_the_Legionella_tag-9-2': "B"},
'Lilly_the_Shigella': {'Lilly_the_Shigella_tag-10-1': "A",
'Lilly_the_Shigella_tag-10-2': "B"}}
expected_low_freq_gene_dict = {'Silas_the_Salmonella': {'Silas_the_Salmonella_tag-1-7': "G"},
'Christina_the_Streptococcus': {},
'Ajwa_the_Shigella': {},
'Ajwa_the_Legionella': {},
'Cari_the_Listeria': {},
'Aman_the_Streptococcus': {},
'Zion_the_Streptococcus': {},
'Dina_the_Shigella': {},
'Silas_the_Legionella': {},
'Lilly_the_Shigella': {'Lilly_the_Shigella_tag-10-6': "F"}}
expected_acc_gene_dict = {'Silas_the_Salmonella': {'Silas_the_Salmonella_tag-1-5.1': 'E',
'Silas_the_Salmonella_tag-1-5.2': 'E'},
'Christina_the_Streptococcus': {'Christina_the_Streptococcus_tag-2-5': "E"},
'Ajwa_the_Shigella': {"Ajwa_the_Shigella_tag-3-5": "E"},
'Ajwa_the_Legionella': {'Ajwa_the_Legionella_tag-4-5': "E"},
'Cari_the_Listeria': {"Cari_the_Listeria_tag-5-5": "E"},
'Aman_the_Streptococcus': {"Aman_the_Streptococcus_tag-6-5": "E"},
'Zion_the_Streptococcus': {"Zion_the_Streptococcus_tag-7-5": "E"},
'Dina_the_Shigella': {"Dina_the_Shigella_tag-8-5": "E"},
'Silas_the_Legionella': {"Silas_the_Legionella_tag-9-5": "E"},
'Lilly_the_Shigella': {'Lilly_the_Shigella_tag-10-5': "E"}}
self.assertEqual(expected_core_gene_dict, core_gene_dict)
self.assertEqual(expected_low_freq_gene_dict, low_freq_gene_dict)
self.assertEqual(expected_acc_gene_dict, acc_gene_dict)
def test_parsing_w_90_presence_roary(self):
file_name = 'TestParsingGenePresenceAbsenceFile/gene_presence_absence.csv'
core_gene_presence = 0.90
low_freq_gene = 0.1
source_program = 'Roary'
input_gffs = ['TestParsingGenePresenceAbsenceFile/Silas_the_Salmonella.gff',
'TestParsingGenePresenceAbsenceFile/Christina_the_Streptococcus.gff',
'TestParsingGenePresenceAbsenceFile/Ajwa_the_Shigella.gff',
'TestParsingGenePresenceAbsenceFile/Ajwa_the_Legionella.gff',
'TestParsingGenePresenceAbsenceFile/Cari_the_Listeria.gff',
'TestParsingGenePresenceAbsenceFile/Aman_the_Streptococcus.gff',
'TestParsingGenePresenceAbsenceFile/Zion_the_Streptococcus.gff',
'TestParsingGenePresenceAbsenceFile/Dina_the_Shigella.gff',
'TestParsingGenePresenceAbsenceFile/Silas_the_Legionella.gff',
'TestParsingGenePresenceAbsenceFile/Lilly_the_Shigella.gff']
tmp_folder_path = 'TestParsingGenePresenceAbsenceFile/'
gene_data_file = {}
corrected_dir = ''
core_gene_dict, low_freq_gene_dict, \
acc_gene_dict = \
parse_gene_presence_absence.read_gene_presence_absence(
file_name, core_gene_presence,
low_freq_gene, source_program,
input_gffs, tmp_folder_path, gene_data_file, corrected_dir, self.logger)
expected_core_gene_dict = {'Silas_the_Salmonella': {'Silas_the_Salmonella_tag-1-1': "A",
'Silas_the_Salmonella_tag-1-2.1': "B",
'Silas_the_Salmonella_tag-1-2.2': "B",
'Silas_the_Salmonella_tag-1-3': 'C',
'Silas_the_Salmonella_tag-1-4.1': 'D',
'Silas_the_Salmonella_tag-1-4.2': 'D', },
'Christina_the_Streptococcus': {'Christina_the_Streptococcus_tag-2-1': "A",
'Christina_the_Streptococcus_tag-2-2': "B",
'Christina_the_Streptococcus_tag-2-3': "C",
'Christina_the_Streptococcus_tag-2-4': "D"},
'Ajwa_the_Shigella': {'Ajwa_the_Shigella_tag-3-1': "A",
'Ajwa_the_Shigella_tag-3-2': "B",
"Ajwa_the_Shigella_tag-3-3": "C",
"Ajwa_the_Shigella_tag-3-4": "D"},
'Ajwa_the_Legionella': {'Ajwa_the_Legionella_tag-4-1': "A",
'Ajwa_the_Legionella_tag-4-2': "B",
'Ajwa_the_Legionella_tag-4-3': "C",
'Ajwa_the_Legionella_tag-4-4': "D"},
'Cari_the_Listeria': {"Cari_the_Listeria_tag-5-3": "C",
"Cari_the_Listeria_tag-5-4": "D",
'Cari_the_Listeria_tag-5-1': "A",
'Cari_the_Listeria_tag-5-2': "B"},
'Aman_the_Streptococcus': {'Aman_the_Streptococcus_tag-6-1': "A",
'Aman_the_Streptococcus_tag-6-2': "B",
"Aman_the_Streptococcus_tag-6-3": "C",
"Aman_the_Streptococcus_tag-6-4": "D"},
'Zion_the_Streptococcus': {"Zion_the_Streptococcus_tag-7-3": "C",
"Zion_the_Streptococcus_tag-7-4": "D",
'Zion_the_Streptococcus_tag-7-1': "A",
'Zion_the_Streptococcus_tag-7-2': "B"},
'Dina_the_Shigella': {"Dina_the_Shigella_tag-8-3": "C",
"Dina_the_Shigella_tag-8-4": "D",
'Dina_the_Shigella_tag-8-1': "A",
'Dina_the_Shigella_tag-8-2': "B"},
'Silas_the_Legionella': {"Silas_the_Legionella_tag-9-3": "C",
"Silas_the_Legionella_tag-9-4": "D",
'Silas_the_Legionella_tag-9-1': "A",
'Silas_the_Legionella_tag-9-2': "B"},
'Lilly_the_Shigella': {'Lilly_the_Shigella_tag-10-1': "A",
'Lilly_the_Shigella_tag-10-2': "B"}}
expected_low_freq_gene_dict = {'Silas_the_Salmonella': {'Silas_the_Salmonella_tag-1-7': "G"},
'Christina_the_Streptococcus': {},
'Ajwa_the_Shigella': {},
'Ajwa_the_Legionella': {},
'Cari_the_Listeria': {},
'Aman_the_Streptococcus': {},
'Zion_the_Streptococcus': {},
'Dina_the_Shigella': {},
'Silas_the_Legionella': {},
'Lilly_the_Shigella': {'Lilly_the_Shigella_tag-10-6': "F"}}
expected_acc_gene_dict = {'Silas_the_Salmonella': {'Silas_the_Salmonella_tag-1-5.1': 'E',
'Silas_the_Salmonella_tag-1-5.2': 'E'},
'Christina_the_Streptococcus': {'Christina_the_Streptococcus_tag-2-5': "E"},
'Ajwa_the_Shigella': {"Ajwa_the_Shigella_tag-3-5": "E"},
'Ajwa_the_Legionella': {'Ajwa_the_Legionella_tag-4-5': "E"},
'Cari_the_Listeria': {"Cari_the_Listeria_tag-5-5": "E"},
'Aman_the_Streptococcus': {"Aman_the_Streptococcus_tag-6-5": "E"},
'Zion_the_Streptococcus': {"Zion_the_Streptococcus_tag-7-5": "E"},
'Dina_the_Shigella': {"Dina_the_Shigella_tag-8-5": "E"},
'Silas_the_Legionella': {"Silas_the_Legionella_tag-9-5": "E"},
'Lilly_the_Shigella': {'Lilly_the_Shigella_tag-10-5': "E"}}
self.assertEqual(expected_core_gene_dict, core_gene_dict)
self.assertEqual(expected_low_freq_gene_dict, low_freq_gene_dict)
self.assertEqual(expected_acc_gene_dict, acc_gene_dict)
def test_parsign_fragmented_gene_w_refound_component(self):
file_name = 'TestParsingGenePresenceAbsenceFile/gene_presence_absence_w_refound_fragment.csv'
core_gene_presence = 0.9
low_freq_gene = 0.1
source_program = 'Panaroo'
input_gffs = ['TestParsingGenePresenceAbsenceFile/Christina_the_Streptococcus.gff',
'TestParsingGenePresenceAbsenceFile/Ajwa_the_Shigella.gff',
'TestParsingGenePresenceAbsenceFile/Ajwa_the_Legionella.gff',
'TestParsingGenePresenceAbsenceFile/Silas_the_Salmonella_w_refound.gff',
'TestParsingGenePresenceAbsenceFile/Cari_the_Listeria.gff',
'TestParsingGenePresenceAbsenceFile/Aman_the_Streptococcus.gff',
'TestParsingGenePresenceAbsenceFile/Zion_the_Streptococcus.gff',
'TestParsingGenePresenceAbsenceFile/Dina_the_Shigella.gff',
'TestParsingGenePresenceAbsenceFile/Silas_the_Legionella.gff',
'TestParsingGenePresenceAbsenceFile/Lilly_the_Shigella.gff']
tmp_folder_path = 'TestParsingGenePresenceAbsenceFile/'
gene_data_file = {'Silas_the_Salmonella_w_refound': {'0_refound_0': ['CTCTTCCGATCTAATCAAGATTGAGAGGAATTGCTGTTTTTATTGGCAAGACAATTTTACTCTTCCGATCTAATCAAGATTGAGAGGAATT', 'gene_name', 'gene_function']}}
corrected_dir ='TestParsingGenePresenceAbsenceFile/Corrected_gffs'
core_gene_dict, low_freq_gene_dict, \
acc_gene_dict = \
parse_gene_presence_absence.read_gene_presence_absence(
file_name, core_gene_presence,
low_freq_gene, source_program,
input_gffs, tmp_folder_path, gene_data_file, corrected_dir, self.logger)
expected_core_gene_dict = {'Silas_the_Salmonella_w_refound': {'Silas_the_Salmonella_tag-1-1': "A",
'0_refound_0': "B",
'Silas_the_Salmonella_tag-1-2.2': "B",
'Silas_the_Salmonella_tag-1-3': 'C',
'Silas_the_Salmonella_tag-1-4.1': 'D',
'Silas_the_Salmonella_tag-1-4.2': 'D', },
'Christina_the_Streptococcus': {'Christina_the_Streptococcus_tag-2-1': "A",
'Christina_the_Streptococcus_tag-2-2': "B",
'Christina_the_Streptococcus_tag-2-3': "C",
'Christina_the_Streptococcus_tag-2-4': "D"},
'Ajwa_the_Shigella': {'Ajwa_the_Shigella_tag-3-1': "A",
'Ajwa_the_Shigella_tag-3-2': "B",
"Ajwa_the_Shigella_tag-3-3": "C",
"Ajwa_the_Shigella_tag-3-4": "D"},
'Ajwa_the_Legionella': {'Ajwa_the_Legionella_tag-4-1': "A",
'Ajwa_the_Legionella_tag-4-2': "B",
'Ajwa_the_Legionella_tag-4-3': "C",
'Ajwa_the_Legionella_tag-4-4': "D"},
'Cari_the_Listeria': {"Cari_the_Listeria_tag-5-3": "C",
"Cari_the_Listeria_tag-5-4": "D",
'Cari_the_Listeria_tag-5-1': "A",
'Cari_the_Listeria_tag-5-2': "B"},
'Aman_the_Streptococcus': {'Aman_the_Streptococcus_tag-6-1': "A",
'Aman_the_Streptococcus_tag-6-2': "B",
"Aman_the_Streptococcus_tag-6-3": "C",
"Aman_the_Streptococcus_tag-6-4": "D"},
'Zion_the_Streptococcus': {"Zion_the_Streptococcus_tag-7-3": "C",
"Zion_the_Streptococcus_tag-7-4": "D",
'Zion_the_Streptococcus_tag-7-1': "A",
'Zion_the_Streptococcus_tag-7-2': "B"},
'Dina_the_Shigella': {"Dina_the_Shigella_tag-8-3": "C",
"Dina_the_Shigella_tag-8-4": "D",
'Dina_the_Shigella_tag-8-1': "A",
'Dina_the_Shigella_tag-8-2': "B"},
'Silas_the_Legionella': {"Silas_the_Legionella_tag-9-3": "C",
"Silas_the_Legionella_tag-9-4": "D",
'Silas_the_Legionella_tag-9-1': "A",
'Silas_the_Legionella_tag-9-2': "B"},
'Lilly_the_Shigella': {'Lilly_the_Shigella_tag-10-1': "A",
'Lilly_the_Shigella_tag-10-2': "B"}}
expected_low_freq_gene_dict = {'Silas_the_Salmonella_w_refound': {'Silas_the_Salmonella_tag_2': "G"},
'Christina_the_Streptococcus': {},
'Ajwa_the_Shigella': {},
'Ajwa_the_Legionella': {},
'Cari_the_Listeria': {},
'Aman_the_Streptococcus': {},
'Zion_the_Streptococcus': {},
'Dina_the_Shigella': {},
'Silas_the_Legionella': {},
'Lilly_the_Shigella': {'Lilly_the_Shigella_tag-10-6': "F"}}
expected_acc_gene_dict = {'Silas_the_Salmonella_w_refound': {'Silas_the_Salmonella_tag-1-5.1': 'E',
'Silas_the_Salmonella_tag-1-5.2': 'E'},
'Christina_the_Streptococcus': {'Christina_the_Streptococcus_tag-2-5': "E"},
'Ajwa_the_Shigella': {"Ajwa_the_Shigella_tag-3-5": "E"},
'Ajwa_the_Legionella': {'Ajwa_the_Legionella_tag-4-5': "E"},
'Cari_the_Listeria': {"Cari_the_Listeria_tag-5-5": "E"},
'Aman_the_Streptococcus': {"Aman_the_Streptococcus_tag-6-5": "E"},
'Zion_the_Streptococcus': {"Zion_the_Streptococcus_tag-7-5": "E"},
'Dina_the_Shigella': {"Dina_the_Shigella_tag-8-5": "E"},
'Silas_the_Legionella': {"Silas_the_Legionella_tag-9-5": "E"},
'Lilly_the_Shigella': {'Lilly_the_Shigella_tag-10-5': "E"}}
self.assertEqual(expected_core_gene_dict, core_gene_dict)
self.assertEqual(expected_low_freq_gene_dict, low_freq_gene_dict)
self.assertEqual(expected_acc_gene_dict, acc_gene_dict)
class TestReadGeneData(unittest.TestCase):
    """ Test of the function that parses the gene_data.csv file produced by Panaroo """
    def test_read_file(self):
        """ Read the mock gene data file and compare the resulting genome -> refound-gene mapping """
        expected_dict = {'PY_40': {'0_refound_0': ['CTCTTCCGATCTAATCAAGATTGAGAGGAATTGCTGTTTTTATTGGCAAGACAATTTTATTTTATCTGATTTTAATGTTGCTGGTCTATTTCTTTGGTTATCTAGGACATGGTCAAAGTAACTTTATTTATAATGAATTTTAG', 'gene_name', 'gene_function'],
                                   '0_refound_100': ['CTCTTCCGATCTAATCAAGATTGAGAGGAATTGCTTTTTTTTTTGGCAAGACAATTTTATTTTATCTGATTTTAATGTTGCTGGTCTATTTCTTTGGTTATCTAGGACATGGTCAAAGTAACTTTATTTATAATGAATTTTAG', 'gene_name', 'gene_function'],
                                   '0_refound_10': ['CTCTTCCGATCTAATCAAGATTGAGAGGAATTGCGCCTTGGCAAGACAATTTTATTTTATCTGATTTTAATGTTGCTGGTCTATTTCTTTGGTTATCTAGGACATGGTCAAAGTAACTTTATTTATAATGAATTTTAG', 'gene_name', 'gene_function']},
                         'PY_41': {'0_refound_1': ['ATGTTGTAGGAAAATACTTGGAAGAATACGTTGACAGGGGTATTTTTGATAAGGAGCCGTTCCAGACCTTTGATCAGAAAGGGATTGGCCGTCTCTTTAGCCCGTTCGGTTAAGCCTGAGTTGAAACTTGGTATTTGTGGGGAACATGGTGGCGATCCTGCTTCCATTGACTTTTACCACAGCCAAGGCCTGACCTACGTTTCTTGTTCGCCATTTAGAGTGCCGCTTACTCGCTTGGCGGCTGCTCAGGCTGCCATCAAAGCTTCAGGCCACAGTCTTACCCAAGACAAATAG', 'gene_name', 'gene_function']},
                         'PY_42': {'0_refound_2': ['ATGTCACTACTGCATATTCATCACAATAAAAAAAAGACAATAGCCCTAATCGTGCTATTGTCTCAAAATCATTTATTTACTTGAAACTTTATCGTGTTACACCAACAGTTTAA', 'gene_name', 'gene_function']},
                         'PY_43': {'0_refound_4': ['ATGAAACGCTATCAACAAGATGCCCTGCTTTTCAAAAAAAATAGATAAAGAAAAGGCTGCGACAGTATCTGCAAGCAGGGCAAAAGAACTAGAAGATAGGCTCAGTCATCAGCCATTAATTGATGATTATCGAGAAAAGATGCAAGATGCAAGATGCAAGTGATGTGACTCAGTATATCACCAAACGTATAGAAGATCAGTTAAACAAGGAGTTAACAAATGGCAAAAACTAA', 'gene_name', 'gene_function']}}
        observed_dict = correct_gffs.read_gene_data('TestReadGeneData/Mock_gene_data.csv')
        self.assertEqual(expected_dict, observed_dict)
class TestPrepairForReannotation(unittest.TestCase):
    """ Tests of preparing a folder for corrected genomes and detecting any left over from previous runs """
    @classmethod
    def setUpClass(cls):
        cls.logger = logging.getLogger('test_logger.log')
        cls.logger.setLevel(logging.INFO)

    def tearDown(self):
        """ Remove the corrected output folder, if a test created one """
        try:
            os.rmdir('TestPrepairForReannotation/Corrected_gff_files')
        except FileNotFoundError:
            pass

    def test_no_files_annotated(self):
        """ No corrected gffs exist yet: the folder is created and all inputs are returned untouched """
        gff_paths = ['Mock_1.gff', 'Mock_2.gff']
        _, _, remaining_gffs = correct_gffs.prepair_for_reannotation('TestPrepairForReannotation/Mock_gene_data.csv',
                                                                     'TestPrepairForReannotation/',
                                                                     gff_paths, self.logger)
        self.assertTrue(os.path.isdir('TestPrepairForReannotation/Corrected_gff_files'))
        self.assertEqual(gff_paths, remaining_gffs)

    def test_some_files_annotated(self):
        """ One gff was corrected previously and is replaced by its corrected counterpart """
        gff_paths = ['mock/test/path/Mock_1.gff', 'mock/test/path/Mock_2.gff', 'Mocky/mock/mock/path/Mock_3.gff']
        _, _, remaining_gffs = correct_gffs.prepair_for_reannotation('TestPrepairForReannotation/Mock_gene_data.csv',
                                                                     'TestPrepairForReannotation/Some_genomes',
                                                                     gff_paths, self.logger)
        expected_gffs = ['mock/test/path/Mock_2.gff',
                         'Mocky/mock/mock/path/Mock_3.gff',
                         'TestPrepairForReannotation/Some_genomes/Corrected_gff_files/Mock_1_corrected.gff']
        self.assertEqual(expected_gffs, remaining_gffs)

    def test_all_files_annotated(self):
        """ Every gff was corrected previously: only corrected versions are returned """
        gff_paths = ['Mock_1.gff', 'Mock_2.gff']
        _, _, remaining_gffs = correct_gffs.prepair_for_reannotation('TestPrepairForReannotation/Mock_gene_data.csv',
                                                                     'TestPrepairForReannotation/All_genomes',
                                                                     gff_paths, self.logger)
        expected_gffs = ['TestPrepairForReannotation/All_genomes/Corrected_gff_files/Mock_1_corrected.gff',
                         'TestPrepairForReannotation/All_genomes/Corrected_gff_files/Mock_2_corrected.gff']
        self.assertEqual(expected_gffs, remaining_gffs)
class TestAddGeneToGff(unittest.TestCase):
"""
Test of the function used to add a gene annotation (line) to a gff file
"""
# Make a setup and a teardown that copies and renames the mock file
    def setUp(self):
        """ Copy the mock gff file aside so the original can be restored after the test modifies it """
        copyfile('TestAddGeneToGff/mocky_test_gff.gff', 'TestAddGeneToGff/mocky_test_gff.gff_copy')
    def tearDown(self):
        """ Discard the modified gff and restore the pristine copy made in setUp """
        os.remove('TestAddGeneToGff/mocky_test_gff.gff')
        os.rename('TestAddGeneToGff/mocky_test_gff.gff_copy', 'TestAddGeneToGff/mocky_test_gff.gff')
def test_adding_a_gene_no_info(self):
tmp_gff_file = 'TestAddGeneToGff/mocky_test_gff.gff'
gene_oi = ['TATA', '', '']
genome_oi = 'CCCCCCCCCCCCTATACCCCCCCC'
contig = 'test_contig_1'
strand = '+'
refound_gene_tag = '0_refound_0'
largest_locus_tag = 'fer_1432'
expected_lines = ['##gff-version 3\n', '#test comment line\n', 'test_contig_1\tPanaroo\tCDS\t13\t16\t.\t+\t0\tID=fer_1433;locus_tag=fer_1433;old_locus_tag=0_refound_0\n']
with open(tmp_gff_file, 'a') as tmp_gff:
correct_gffs.add_gene_to_gff(tmp_gff, gene_oi[0], genome_oi, contig, strand, refound_gene_tag, gene_oi[1:], largest_locus_tag)
with open('TestAddGeneToGff/mocky_test_gff.gff', 'r') as added_gff:
self.assertEqual(expected_lines, added_gff.readlines())
def test_adding_a_gene_name(self):
tmp_gff_file = 'TestAddGeneToGff/mocky_test_gff.gff'
gene_oi = ['TATA', 'Gene_name', '']
genome_oi = 'CCCCCCCCCCCCTATACCCCCCCC'
contig = 'test_contig_1'
strand = '+'
refound_gene_tag = '0_refound_0'
largest_locus_tag = 'fer_1432'
expected_lines = ['##gff-version 3\n', '#test comment line\n', 'test_contig_1\tPanaroo\tCDS\t13\t16\t.\t+\t0\tID=fer_1433;locus_tag=fer_1433;old_locus_tag=0_refound_0;name=Gene_name\n']
with open(tmp_gff_file, 'a') as tmp_gff:
correct_gffs.add_gene_to_gff(tmp_gff, gene_oi[0], genome_oi, contig, strand, refound_gene_tag, gene_oi[1:], largest_locus_tag)
with open('TestAddGeneToGff/mocky_test_gff.gff', 'r') as added_gff:
self.assertEqual(expected_lines, added_gff.readlines())
def test_adding_a_gene_annotation(self):
tmp_gff_file = 'TestAddGeneToGff/mocky_test_gff.gff'
gene_oi = ['TATA', '', 'Gene_annotation']
genome_oi = 'CCCCCCCCCCCCTATACCCCCCCC'
contig = 'test_contig_1'
strand = '+'
refound_gene_tag = '0_refound_0'
largest_locus_tag = 'fer_1432'
expected_lines = ['##gff-version 3\n', '#test comment line\n', 'test_contig_1\tPanaroo\tCDS\t13\t16\t.\t+\t0\tID=fer_1433;locus_tag=fer_1433;old_locus_tag=0_refound_0;annotation=Gene_annotation\n']
with open(tmp_gff_file, 'a') as tmp_gff:
correct_gffs.add_gene_to_gff(tmp_gff, gene_oi[0], genome_oi, contig, strand, refound_gene_tag, gene_oi[1:], largest_locus_tag)
with open('TestAddGeneToGff/mocky_test_gff.gff', 'r') as added_gff:
self.assertEqual(expected_lines, added_gff.readlines())
def test_adding_a_gene_name_and_annotation(self):
tmp_gff_file = 'TestAddGeneToGff/mocky_test_gff.gff'
gene_oi = ['TATA', 'Gene_name', 'Gene_annotation']
genome_oi = 'CCCCCCCCCCCCTATACCCCCCCC'
contig = 'test_contig_1'
strand = '+'
refound_gene_tag = '0_refound_0'
largest_locus_tag = 'fer_1432'
expected_lines = ['##gff-version 3\n', '#test comment line\n', 'test_contig_1\tPanaroo\tCDS\t13\t16\t.\t+\t0\tID=fer_1433;locus_tag=fer_1433;old_locus_tag=0_refound_0;name=Gene_name;annotation=Gene_annotation\n']
with open(tmp_gff_file, 'a') as tmp_gff:
correct_gffs.add_gene_to_gff(tmp_gff, gene_oi[0], genome_oi, contig, strand, refound_gene_tag, gene_oi[1:], largest_locus_tag)
with open('TestAddGeneToGff/mocky_test_gff.gff', 'r') as added_gff:
self.assertEqual(expected_lines, added_gff.readlines())
class TestWriteContig(unittest.TestCase):
    """
    Test of the function used to write a contig in a gff file.
    """

    def setUp(self):
        """Back up the mock gff so the test can modify it freely."""
        copyfile('TestWriteContig/mocky_test_gff.gff', 'TestWriteContig/mocky_test_gff.gff_copy')

    def tearDown(self):
        """Discard the modified gff and restore the pristine copy."""
        os.remove('TestWriteContig/mocky_test_gff.gff')
        os.rename('TestWriteContig/mocky_test_gff.gff_copy', 'TestWriteContig/mocky_test_gff.gff')

    def test_writing_a_contig(self):
        """A contig is appended as a fasta header followed by 60-character wrapped sequence lines."""
        gff_path = 'TestWriteContig/mocky_test_gff.gff'
        # 96 bp of sequence: a 16-mer repeated six times.
        sequence = 'AAATAAATGGGCGGGC' * 6
        expected = ['##gff-version 3\n', '#test comment line\n', '>Test_contig_name space\n',
                    'AAATAAATGGGCGGGCAAATAAATGGGCGGGCAAATAAATGGGCGGGCAAATAAATGGGC\n',
                    'GGGCAAATAAATGGGCGGGCAAATAAATGGGCGGGC\n']
        with open(gff_path, 'a') as out_file:
            correct_gffs.write_contig(out_file, 'Test_contig_name space', sequence)
        with open(gff_path, 'r') as result:
            self.assertEqual(expected, result.readlines())
class TestAnnotateRefoundGenomes(unittest.TestCase):
    """
    Test of the function used to reannotate refound genes identified by panaroo in a gff file.
    """
    # Fasta section of the corrected gff. It is identical for the positive-
    # and negative-strand tests because only the annotation lines differ.
    FASTA_LINES = ['##FASTA\n',
                   '>test_contig\n',
                   'TTTTTTTTTTTTTTTCTCTTCCGATCTAATCAAGATTGAGAGGAATTGCTGTTTTTATTG\n',
                   'GCAAGACAATTTTATTTTATCTGATTTTAATGTTGCTGGTCTATTTCTTTGGTTATCTAG\n',
                   'GACATGGTCAAAGTAACTTTATTTATAATGAATTTTAGTTTTTTTTTTTTTTTCTCTTCC\n',
                   'GATCTAATCAAGATTGAGAGGAATTGCTTTTTTTTTTGGCAAGACAATTTTATTTTATCT\n',
                   'GATTTTAATGTTGCTGGTCTATTTCTTTGGTTATCTAGGACATGGTCAAAGTAACTTTAT\n',
                   'TTATAATGAATTTTAGTTTTTTTTTTTTTTTCTCTTCCGATCTAATCAAGATTGAGAGGA\n',
                   'ATTGCGCCTTGGCAAGACAATTTTATTTTATCTGATTTTAATGTTGCTGGTCTATTTCTT\n',
                   'TGGTTATCTAGGACATGGTCAAAGTAACTTTATTTATAATGAATTTTAGTTTTTTTTTTT\n',
                   'TTTT\n']

    @classmethod
    def setUpClass(cls):
        """Create one shared logger for all calls under test."""
        cls.logger = logging.getLogger('test_logger.log')
        cls.logger.setLevel(logging.INFO)

    def tearDown(self):
        """Remove the files produced by a test run.

        A successful run writes *_corrected.gff; a failed run leaves only the
        temporary gff behind, so fall back to removing that one instead.
        """
        try:
            os.remove('TestAnnotateRefoundGenomes/reannotate_gff_corrected.gff')
        except FileNotFoundError:
            os.remove('TestAnnotateRefoundGenomes/reannotate_gff_tmp.gff')
        os.remove('TestAnnotateRefoundGenomes/reannotate_gff.gff_db')

    def _annotate_and_compare(self, gene_data_dict, strand):
        """Run annotate_refound_genes and compare the corrected gff.

        The three refound genes are expected as Panaroo CDS lines on the
        given strand, flanked by the two original Prokka genes and followed
        by the unchanged fasta section.
        """
        expected_lines = [
            '##gff-version 3\n',
            '#test comment line\n',
            'test_contig\tProkka\tCDS\t1\t10\t.\t+\t0\tlocus_tag=locus_tag_0097\n',
            'test_contig\tPanaroo\tCDS\t16\t158\t.\t' + strand + '\t0\tID=locus_tag_0099;locus_tag=locus_tag_0099;old_locus_tag=0_refound_0;name=gene_name;annotation=gene_function\n',
            'test_contig\tPanaroo\tCDS\t174\t316\t.\t' + strand + '\t0\tID=locus_tag_0100;locus_tag=locus_tag_0100;old_locus_tag=0_refound_100;annotation=gene_function\n',
            'test_contig\tPanaroo\tCDS\t332\t469\t.\t' + strand + '\t0\tID=locus_tag_0101;locus_tag=locus_tag_0101;old_locus_tag=0_refound_10;name=gene_name\n',
            'test_contig\tProkka\tCDS\t474\t484\t.\t+\t0\tlocus_tag=locus_tag_0098\n',
        ] + self.FASTA_LINES
        correct_gffs.annotate_refound_genes('TestAnnotateRefoundGenomes/reannotate_gff.gff',
                                            gene_data_dict,
                                            'TestAnnotateRefoundGenomes',
                                            'TestAnnotateRefoundGenomes',
                                            self.logger)
        with open('TestAnnotateRefoundGenomes/reannotate_gff_corrected.gff', 'r') as added_gff:
            self.assertEqual(expected_lines, added_gff.readlines())

    def test_annotation_of_pos_stand_gene(self):
        """Refound genes given in forward orientation are annotated on the + strand."""
        gene_data_dict = {'reannotate_gff': {'0_refound_0': ['CTCTTCCGATCTAATCAAGATTGAGAGGAATTGCTGTTTTTATTGGCAAGACAATTTTATTTTATCTGATTTTAATGTTGCTGGTCTATTTCTTTGGTTATCTAGGACATGGTCAAAGTAACTTTATTTATAATGAATTTTAG', 'gene_name', 'gene_function'],
                                             '0_refound_100': ['CTCTTCCGATCTAATCAAGATTGAGAGGAATTGCTTTTTTTTTTGGCAAGACAATTTTATTTTATCTGATTTTAATGTTGCTGGTCTATTTCTTTGGTTATCTAGGACATGGTCAAAGTAACTTTATTTATAATGAATTTTAG', '', 'gene_function'],
                                             '0_refound_10': ['CTCTTCCGATCTAATCAAGATTGAGAGGAATTGCGCCTTGGCAAGACAATTTTATTTTATCTGATTTTAATGTTGCTGGTCTATTTCTTTGGTTATCTAGGACATGGTCAAAGTAACTTTATTTATAATGAATTTTAG', 'gene_name', '']}}
        self._annotate_and_compare(gene_data_dict, '+')

    def test_annotation_of_neg_stand_gene(self):
        """Refound genes given as reverse complements are annotated on the - strand."""
        gene_data_dict = {'reannotate_gff': {'0_refound_0': ['CTAAAATTCATTATAAATAAAGTTACTTTGACCATGTCCTAGATAACCAAAGAAATAGACCAGCAACATTAAAATCAGATAAAATAAAATTGTCTTGCCAATAAAAACAGCAATTCCTCTCAATCTTGATTAGATCGGAAGAG', 'gene_name', 'gene_function'],
                                             '0_refound_100': ['CTAAAATTCATTATAAATAAAGTTACTTTGACCATGTCCTAGATAACCAAAGAAATAGACCAGCAACATTAAAATCAGATAAAATAAAATTGTCTTGCCAAAAAAAAAAGCAATTCCTCTCAATCTTGATTAGATCGGAAGAG', '', 'gene_function'],
                                             '0_refound_10': ['CTAAAATTCATTATAAATAAAGTTACTTTGACCATGTCCTAGATAACCAAAGAAATAGACCAGCAACATTAAAATCAGATAAAATAAAATTGTCTTGCCAAGGCGCAATTCCTCTCAATCTTGATTAGATCGGAAGAG', 'gene_name', '']}}
        self._annotate_and_compare(gene_data_dict, '-')

    def test_gene_not_found(self):
        """A refound sequence absent from the genome makes the function exit."""
        gene_data_dict = {'reannotate_gff': {'0_refound_0': [
            'CCCCCCCCCCCCGGGGGGGGGGGGGGGCGGCGCGCGCGCGCGCGGCGCGCGCGGCGCGC',
            'gene_name', 'gene_function']}}
        with self.assertRaises(SystemExit):
            correct_gffs.annotate_refound_genes('TestAnnotateRefoundGenomes/reannotate_gff.gff',
                                                gene_data_dict,
                                                'TestAnnotateRefoundGenomes',
                                                'TestAnnotateRefoundGenomes',
                                                self.logger)
    # TODO - Add test for annotating of second contig
class TestExtractGenomeFasta(unittest.TestCase):
    """Test of the helper that pulls the genome fasta, largest locus tag and header lines from a gff."""

    def test_extract_genome_fasta(self):
        """All three return values are extracted from the mock gff in one call."""
        fasta_dict, locus_tag, headers = correct_gffs.extract_genome_fasta('TestExtractGenomeFasta/Mock_gff.gff')
        expected_fasta_dict = {'contig_1': "NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN"}
        self.assertEqual(expected_fasta_dict, fasta_dict)
        self.assertEqual('fer_006', locus_tag)
        self.assertEqual(['##gff-version3\n', '#test-line\n'], headers)
class TestParsingGffFile(unittest.TestCase):
    """ Test of the function that is used to pass a gff file and return a generator object of CDS lines """
    # CDS lines common to the corrected and uncorrected mock gff files.
    EXPECTED_CDS = [['contig_1', '.', 'CDS', '1', '90', '.', '.', '.', 'Silas_the_Salmonella_tag-1-1'],
                    ['contig_1', '.', 'CDS', '100', '190', '.', '.', '.', 'Silas_the_Salmonella_tag-1-2.1'],
                    ['contig_1', '.', 'CDS', '200', '290', '.', '.', '.', 'Silas_the_Salmonella_tag-1-2.2'],
                    ['contig_1', '.', 'CDS', '300', '390', '.', '.', '.', 'Silas_the_Salmonella_tag-1-3'],
                    ['contig_1', '.', 'CDS', '400', '490', '.', '.', '.', 'Silas_the_Salmonella_tag-1-4.1'],
                    ['contig_1', '.', 'CDS', '500', '590', '.', '.', '.', 'Silas_the_Salmonella_tag-1-4.2'],
                    ['contig_1', '.', 'CDS', '600', '690', '.', '.', '.', 'Silas_the_Salmonella_tag-1-5.1'],
                    ['contig_1', '.', 'CDS', '700', '790', '.', '.', '.', 'Silas_the_Salmonella_tag-1.7'],
                    ['contig_1', '.', 'CDS', '800', '890', '.', '.', '.', 'Silas_the_Salmonella_tag-1-5.2']]

    def test_gff_generator_generation_not_corrected(self):
        """parse_gff yields every CDS line of an uncorrected gff, in file order."""
        return_generator = gff_parser.parse_gff('TestParsingGffFile/Silas_the_Salmonella.gff')
        # Materialise the generator and compare whole lists. The previous
        # zip-based loop silently passed when the generator yielded fewer
        # lines than expected, because zip stops at the shorter iterable.
        self.assertEqual(self.EXPECTED_CDS, list(return_generator))

    def test_gff_generator_generation_corrected_gff(self):
        """A corrected gff additionally yields the appended Panaroo refound gene line."""
        expected_output = self.EXPECTED_CDS + [
            ['contig_1', 'Panaroo', 'CDS', '900', '1000', '.', '+', '0', 'refound_gene_1']]
        return_generator = gff_parser.parse_gff('TestParsingGffFile/Silas_the_Salmonella_corrected.gff')
        self.assertEqual(expected_output, list(return_generator))
class TestGetContigLenth(unittest.TestCase):
    """
    Test function that passes a gff file and counts the length of each contig in attached genome
    """

    def _assert_contig_lengths(self, input_gff_path, expected_dict):
        """Run get_contig_lengths on input_gff_path and compare against expected_dict."""
        self.assertEqual(expected_dict, gff_parser.get_contig_lengths(input_gff_path))

    def test_single_contig(self):
        """One contig whose sequence sits on a single (unwrapped) line."""
        self._assert_contig_lengths('TestGetContigLenth/single_contig_unwrapped.txt',
                                    {'contig_1': 1300})

    def test_single_wrapped_contig(self):
        """One contig whose sequence is wrapped over several lines."""
        self._assert_contig_lengths('TestGetContigLenth/single_contig_wrapped.txt',
                                    {'contig_1': 1300})

    def test_multiple_contigs(self):
        """Two unwrapped contigs are both counted."""
        self._assert_contig_lengths('TestGetContigLenth/multi_contig_unwrapped.txt',
                                    {'contig_1': 1300, 'contig_2': 1300})

    def test_multiple_wrapped_contigs(self):
        """Two wrapped contigs are both counted."""
        self._assert_contig_lengths('TestGetContigLenth/multi_contig_wrapped.txt',
                                    {'contig_1': 1300, 'contig_2': 1300})
class TestRecordCoreCoreRegion(unittest.TestCase):
    """
    Test function that is used to record information of a region identified between two core genes.
    """

    def _run_region(self, core_genes, gff_line, contig_end,
                    previous_core_gene_id, previous_core_gene_end_coor,
                    acc_genes_in_region=(), low_freq_genes_in_region=()):
        """Call record_core_core_region with fresh, empty accumulators and return its result tuple.

        All seven test cases use the genome name 'gff_name' and start from
        empty distance/content/pair/master accumulators, so those are fixed here.
        """
        return gff_parser.record_core_core_region(core_genes, 'gff_name', gff_line, contig_end,
                                                  previous_core_gene_id, previous_core_gene_end_coor,
                                                  list(acc_genes_in_region), list(low_freq_genes_in_region),
                                                  {}, {}, {}, [], {})

    def _assert_region(self, returned, expected_id, expected_end_coor, expected_distance,
                       expected_accessory, expected_low_freq, expected_pairs, expected_master_info):
        """Compare the nine values returned by record_core_core_region.

        The accessory and low-frequency 'genes in region' lists are always
        expected to come back empty - every original test case expected them
        to be reset after a region was recorded.
        """
        return_id, return_end_coor, return_acc_region, return_low_freq_region, return_distance, \
            return_accessory, return_low_freq, return_pairs, return_master_info = returned
        self.assertEqual(expected_id, return_id)
        self.assertEqual(expected_end_coor, return_end_coor)
        self.assertEqual([], return_acc_region)
        self.assertEqual([], return_low_freq_region)
        self.assertEqual(expected_accessory, return_accessory)
        self.assertEqual(expected_low_freq, return_low_freq)
        self.assertEqual(expected_distance, return_distance)
        self.assertEqual(expected_pairs, return_pairs)
        self.assertEqual(expected_master_info, return_master_info)

    def test_recording_neighbouring_core_genes(self):
        """Two distinct neighbouring core genes form a pair with an empty region."""
        core_genes = {'gff_name': {'Core_ID_1': 'pan_gene_1',
                                   'Core_ID_2': 'pan_gene_2'}}
        gff_line = ['gff_name_contig_1', '.', 'CDS', '90', '150', '.', '.', '.', 'Core_ID_2']
        returned = self._run_region(core_genes, gff_line, 1500, 'Core_ID_1', 10)
        self._assert_region(returned,
                            expected_id='Core_ID_2',
                            expected_end_coor=150,
                            expected_distance={'pan_gene_1--pan_gene_2': 79},
                            expected_accessory={'pan_gene_1--pan_gene_2': []},
                            expected_low_freq={'pan_gene_1--pan_gene_2': []},
                            expected_pairs=['pan_gene_1--pan_gene_2'],
                            expected_master_info={'pan_gene_1--pan_gene_2--gff_name':
                                                  ['gff_name', 'pan_gene_1', 'pan_gene_2', 79, 0, [], []]})

    def test_recording_neighbouring_core_genes_w_accessory(self):
        """Accessory and low-frequency genes in the region are attached to the recorded pair."""
        core_genes = {'gff_name': {'Core_ID_1': 'pan_gene_1',
                                   'Core_ID_2': 'pan_gene_2'}}
        gff_line = ['gff_name_contig_1', '.', 'CDS', '90', '150', '.', '.', '.', 'Core_ID_2']
        returned = self._run_region(core_genes, gff_line, 1500, 'Core_ID_1', 10,
                                    acc_genes_in_region=['acc_gene_1', 'acc_gene_2'],
                                    low_freq_genes_in_region=['low_freq_1'])
        self._assert_region(returned,
                            expected_id='Core_ID_2',
                            expected_end_coor=150,
                            expected_distance={'pan_gene_1--pan_gene_2': 79},
                            expected_accessory={'pan_gene_1--pan_gene_2': ['acc_gene_1', 'acc_gene_2']},
                            expected_low_freq={'pan_gene_1--pan_gene_2': ['low_freq_1']},
                            expected_pairs=['pan_gene_1--pan_gene_2'],
                            expected_master_info={'pan_gene_1--pan_gene_2--gff_name':
                                                  ['gff_name', 'pan_gene_1', 'pan_gene_2', 79, 3,
                                                   ['acc_gene_1', 'acc_gene_2'], ['low_freq_1']]})

    def test_recording_w_fragment_given(self):
        """Two fragments mapping to the same pan-genome gene do not form a pair."""
        core_genes = {'gff_name': {'Core_ID_1': 'pan_gene_1',
                                   'Core_ID_2': 'pan_gene_1'}}
        gff_line = ['gff_name_contig_1', '.', 'CDS', '90', '150', '.', '.', '.', 'Core_ID_2']
        returned = self._run_region(core_genes, gff_line, 1500, 'Core_ID_1', 10)
        self._assert_region(returned,
                            expected_id='Core_ID_2',
                            expected_end_coor=150,
                            expected_distance={},
                            expected_accessory={},
                            expected_low_freq={},
                            expected_pairs=[],
                            expected_master_info={})

    def test_recording_core_gene_before_seqeuncebreak(self):
        """A gff_line of None records the previous core gene against a sequence break."""
        core_genes = {'gff_name': {'Core_ID_1': 'pan_gene_1'}}
        returned = self._run_region(core_genes, None, 1500, 'Core_ID_1', 150)
        self._assert_region(returned,
                            expected_id='Sequence_break',
                            expected_end_coor=150,
                            expected_distance={'pan_gene_1--Sequence_break': 1350},
                            expected_accessory={'pan_gene_1--Sequence_break': []},
                            expected_low_freq={'pan_gene_1--Sequence_break': []},
                            expected_pairs=['pan_gene_1--Sequence_break'],
                            expected_master_info={'pan_gene_1--Sequence_break--gff_name':
                                                  ['gff_name', 'pan_gene_1', 'Sequence_break', 1350, 0, [], []]})

    def test_recording_core_gene_before_seqeuncebreak_w_accessory(self):
        """Accessory genes collected before a sequence break stay with the break pair."""
        core_genes = {'gff_name': {'Core_ID_1': 'pan_gene_1'}}
        returned = self._run_region(core_genes, None, 1500, 'Core_ID_1', 150,
                                    acc_genes_in_region=['acc_1'],
                                    low_freq_genes_in_region=['low_1', 'low_2'])
        self._assert_region(returned,
                            expected_id='Sequence_break',
                            expected_end_coor=150,
                            expected_distance={'pan_gene_1--Sequence_break': 1350},
                            expected_accessory={'pan_gene_1--Sequence_break': ['acc_1']},
                            expected_low_freq={'pan_gene_1--Sequence_break': ['low_1', 'low_2']},
                            expected_pairs=['pan_gene_1--Sequence_break'],
                            expected_master_info={'pan_gene_1--Sequence_break--gff_name':
                                                  ['gff_name', 'pan_gene_1', 'Sequence_break', 1350, 3,
                                                   ['acc_1'], ['low_1', 'low_2']]})

    def test_recording_first_core_gene_on_contig_as_first_gene(self):
        """The first core gene after a sequence break is paired with the break itself."""
        core_genes = {'gff_name': {'Core_ID_1': 'pan_gene_1'}}
        gff_line = ['gff_name_contig_1', '.', 'CDS', '90', '180', '.', '.', '.', 'Core_ID_1']
        returned = self._run_region(core_genes, gff_line, 0, 'Sequence_break', 150)
        self._assert_region(returned,
                            expected_id='Core_ID_1',
                            expected_end_coor=180,
                            expected_distance={'Sequence_break--pan_gene_1': 89},
                            expected_accessory={'Sequence_break--pan_gene_1': []},
                            expected_low_freq={'Sequence_break--pan_gene_1': []},
                            expected_pairs=['Sequence_break--pan_gene_1'],
                            expected_master_info={'Sequence_break--pan_gene_1--gff_name':
                                                  ['gff_name', 'Sequence_break', 'pan_gene_1', 89, 0, [], []]})

    def test_recording_first_core_gene_on_contig_w_accessory(self):
        """Accessory genes seen before the first core gene on a contig stay with the break pair."""
        core_genes = {'gff_name': {'Core_ID_1': 'pan_gene_1'}}
        gff_line = ['gff_name_contig_1', '.', 'CDS', '90', '180', '.', '.', '.', 'Core_ID_1']
        returned = self._run_region(core_genes, gff_line, None, 'Sequence_break', 150,
                                    acc_genes_in_region=['acc_1', 'acc_2', 'acc_3'])
        self._assert_region(returned,
                            expected_id='Core_ID_1',
                            expected_end_coor=180,
                            expected_distance={'Sequence_break--pan_gene_1': 89},
                            expected_accessory={'Sequence_break--pan_gene_1': ['acc_1', 'acc_2', 'acc_3']},
                            expected_low_freq={'Sequence_break--pan_gene_1': []},
                            expected_pairs=['Sequence_break--pan_gene_1'],
                            expected_master_info={'Sequence_break--pan_gene_1--gff_name':
                                                  ['gff_name', 'Sequence_break', 'pan_gene_1', 89, 3,
                                                   ['acc_1', 'acc_2', 'acc_3'], []]})
class TestConnectFirstNLastGeneOnContig(unittest.TestCase):
"""
Test for the function recordning connections between the first and the last gene on a contig in a complete genome
"""
def test_connect_last_n_first_gene_different_genes_no_accessory(self):
    """Connecting the last and first core gene on a contig records one pair with an empty region."""
    core_genes = {'gff_name': {'Core_ID_1': 'pan_gene_1',
                               'Core_ID_2': 'pan_gene_2'}}
    first_core_gene_gff_line = ['gff_name_contig_1', '.', 'CDS', '90', '180', '.', '.', '.', 'Core_ID_1']
    # All accumulators start empty; the last core gene (Core_ID_2) ends at
    # coordinate 1450 on a 1500 bp contig, the first (Core_ID_1) starts at 90.
    (return_previous_core_gene_id, return_previous_core_gene_end_coor, return_acc_genes_in_region,
     return_low_freq_genes_in_region, return_core_gene_pairs, return_core_gene_pair_distance,
     return_accessory_gene_content, return_low_freq_gene_content,
     return_master_info) = gff_parser.connect_first_n_last_gene_on_contig(
        core_genes, 'gff_name', 'Core_ID_2', 1450, first_core_gene_gff_line,
        [], [], [], [], 1500, [], {}, {}, {}, {})
    self.assertEqual('', return_previous_core_gene_id)
    self.assertEqual(180, return_previous_core_gene_end_coor)
    self.assertEqual([], return_acc_genes_in_region)
    self.assertEqual([], return_low_freq_genes_in_region)
    self.assertEqual({'pan_gene_1--pan_gene_2': []}, return_accessory_gene_content)
    self.assertEqual({'pan_gene_1--pan_gene_2': []}, return_low_freq_gene_content)
    self.assertEqual({'pan_gene_1--pan_gene_2': 139}, return_core_gene_pair_distance)
    self.assertEqual(['pan_gene_1--pan_gene_2'], return_core_gene_pairs)
    self.assertEqual({'pan_gene_1--pan_gene_2--gff_name':
                      ['gff_name', 'pan_gene_1', 'pan_gene_2', 139, 0, [], []]},
                     return_master_info)
def test_connect_last_n_first_gene_different_genes_w_accessory(self):
core_genes = {'gff_name': {'Core_ID_1': 'pan_gene_1',
'Core_ID_2': 'pan_gene_2'}}
gff_name = 'gff_name'
previous_core_gene_id = "Core_ID_2"
previous_core_gene_end_coor = 1450
first_core_gene_gff_line = ['gff_name_contig_1', '.', 'CDS', '90', '180', '.', '.', '.', 'Core_ID_1']
acc_genes_in_region = ['acc_1']
first_core_accessory_content = ['first_acc_1']
low_freq_genes_in_region = ['low_acc_1']
first_core_low_freq_genes = ['first_low_1']
contig_size = 1500
core_gene_pairs = []
core_gene_pair_distance = {}
accessory_gene_content = {}
low_freq_gene_content = {}
master_info = {}
# Set up the expected return values
expected_previous_core_gene_id = ""
expected_previous_core_gene_end_coor = 180
expected_acc_genes_in_region = []
expected_low_freq_genes_in_region = []
expected_core_gene_pairs = ['pan_gene_1--pan_gene_2']
expected_core_gene_pair_distance = {'pan_gene_1--pan_gene_2': 139}
expected_accessory_gene_content = {'pan_gene_1--pan_gene_2': ['acc_1', 'first_acc_1']}
expected_low_freq_gene_content = {'pan_gene_1--pan_gene_2': ['first_low_1', 'low_acc_1']}
expected_master_info = {'pan_gene_1--pan_gene_2--gff_name': ['gff_name', 'pan_gene_1', 'pan_gene_2', 139, 4, ['acc_1', 'first_acc_1'], ['first_low_1', 'low_acc_1']]}
return_previous_core_gene_id, return_previous_core_gene_end_coor, return_acc_genes_in_region, \
return_low_freq_genes_in_region, return_core_gene_pairs, return_core_gene_pair_distance, \
return_accessory_gene_content, return_low_freq_gene_content, \
return_master_info = gff_parser.connect_first_n_last_gene_on_contig(core_genes, gff_name, previous_core_gene_id,
previous_core_gene_end_coor,
first_core_gene_gff_line,
acc_genes_in_region,
first_core_accessory_content,
low_freq_genes_in_region,
first_core_low_freq_genes, contig_size,
core_gene_pairs, core_gene_pair_distance,
accessory_gene_content,
low_freq_gene_content, master_info)
# Assert expected against returned
self.assertEqual(expected_previous_core_gene_id, return_previous_core_gene_id)
self.assertEqual(expected_previous_core_gene_end_coor, return_previous_core_gene_end_coor)
self.assertEqual(expected_acc_genes_in_region, return_acc_genes_in_region)
self.assertEqual(expected_low_freq_genes_in_region, return_low_freq_genes_in_region)
self.assertEqual(expected_accessory_gene_content, return_accessory_gene_content)
self.assertEqual(expected_low_freq_gene_content, return_low_freq_gene_content)
self.assertEqual(expected_core_gene_pair_distance, return_core_gene_pair_distance)
self.assertEqual(expected_core_gene_pairs, return_core_gene_pairs)
self.assertEqual(expected_master_info, return_master_info)
def test_connect_same_gene_as_last_n_first_gene_no_accessory(self):
core_genes = {'gff_name': {'Core_ID_1': 'pan_gene_1',
'Core_ID_2': 'pan_gene_2'}}
gff_name = 'gff_name'
previous_core_gene_id = "Core_ID_1"
previous_core_gene_end_coor = 180
first_core_gene_gff_line = ['gff_name_contig_1', '.', 'CDS', '90', '180', '.', '.', '.', 'Core_ID_1']
acc_genes_in_region = []
first_core_accessory_content = []
low_freq_genes_in_region = []
first_core_low_freq_genes = []
contig_size = 1500
core_gene_pairs = []
core_gene_pair_distance = {}
accessory_gene_content = {}
low_freq_gene_content = {}
master_info = {}
# Set up the expected return values
expected_previous_core_gene_id = ""
expected_previous_core_gene_end_coor = 180
expected_acc_genes_in_region = []
expected_low_freq_genes_in_region = []
expected_core_gene_pairs = ['pan_gene_1--pan_gene_1']
expected_core_gene_pair_distance = {'pan_gene_1--pan_gene_1': 1409}
expected_accessory_gene_content = {'pan_gene_1--pan_gene_1': []}
expected_low_freq_gene_content = {'pan_gene_1--pan_gene_1': []}
expected_master_info = {'pan_gene_1--pan_gene_1--gff_name': ['gff_name', 'pan_gene_1', 'pan_gene_1', 1409, 0, [], []]}
return_previous_core_gene_id, return_previous_core_gene_end_coor, return_acc_genes_in_region, \
return_low_freq_genes_in_region, return_core_gene_pairs, return_core_gene_pair_distance, \
return_accessory_gene_content, return_low_freq_gene_content, \
return_master_info = gff_parser.connect_first_n_last_gene_on_contig(core_genes, gff_name, previous_core_gene_id,
previous_core_gene_end_coor,
first_core_gene_gff_line,
acc_genes_in_region,
first_core_accessory_content,
low_freq_genes_in_region,
first_core_low_freq_genes, contig_size,
core_gene_pairs, core_gene_pair_distance,
accessory_gene_content,
low_freq_gene_content, master_info)
self.assertEqual(expected_previous_core_gene_id, return_previous_core_gene_id)
self.assertEqual(expected_previous_core_gene_end_coor, return_previous_core_gene_end_coor)
self.assertEqual(expected_acc_genes_in_region, return_acc_genes_in_region)
self.assertEqual(expected_low_freq_genes_in_region, return_low_freq_genes_in_region)
self.assertEqual(expected_accessory_gene_content, return_accessory_gene_content)
self.assertEqual(expected_low_freq_gene_content, return_low_freq_gene_content)
self.assertEqual(expected_core_gene_pair_distance, return_core_gene_pair_distance)
self.assertEqual(expected_core_gene_pairs, return_core_gene_pairs)
self.assertEqual(expected_master_info, return_master_info)
def test_connect_same_gene_as_last_n_first_gene_w_accessory(self):
core_genes = {'gff_name': {'Core_ID_1': 'pan_gene_1',
'Core_ID_2': 'pan_gene_2'}}
gff_name = 'gff_name'
previous_core_gene_id = "Core_ID_1"
previous_core_gene_end_coor = 180
first_core_gene_gff_line = ['gff_name_contig_1', '.', 'CDS', '90', '180', '.', '.', '.', 'Core_ID_1']
acc_genes_in_region = ['acc_2', 'acc_3']
first_core_accessory_content = ['acc_1']
low_freq_genes_in_region = ['low_1', 'low_2']
first_core_low_freq_genes = ['low_3']
contig_size = 1500
core_gene_pairs = []
core_gene_pair_distance = {}
accessory_gene_content = {}
low_freq_gene_content = {}
master_info = {}
# Set up the expected return values
expected_previous_core_gene_id = ""
expected_previous_core_gene_end_coor = 180
expected_acc_genes_in_region = []
expected_low_freq_genes_in_region = []
expected_core_gene_pairs = ['pan_gene_1--pan_gene_1']
expected_core_gene_pair_distance = {'pan_gene_1--pan_gene_1': 1409}
expected_accessory_gene_content = {'pan_gene_1--pan_gene_1': ['acc_1', 'acc_2', 'acc_3']}
expected_low_freq_gene_content = {'pan_gene_1--pan_gene_1': ['low_1', 'low_2', 'low_3']}
expected_master_info = {'pan_gene_1--pan_gene_1--gff_name': ['gff_name', 'pan_gene_1', 'pan_gene_1', 1409, 6, ['acc_1', 'acc_2', 'acc_3'], ['low_1', 'low_2', 'low_3']]}
return_previous_core_gene_id, return_previous_core_gene_end_coor, return_acc_genes_in_region, \
return_low_freq_genes_in_region, return_core_gene_pairs, return_core_gene_pair_distance, \
return_accessory_gene_content, return_low_freq_gene_content, \
return_master_info = gff_parser.connect_first_n_last_gene_on_contig(core_genes, gff_name, previous_core_gene_id,
previous_core_gene_end_coor,
first_core_gene_gff_line,
acc_genes_in_region,
first_core_accessory_content,
low_freq_genes_in_region,
first_core_low_freq_genes, contig_size,
core_gene_pairs, core_gene_pair_distance,
accessory_gene_content,
low_freq_gene_content, master_info)
self.assertEqual(expected_previous_core_gene_id, return_previous_core_gene_id)
self.assertEqual(expected_previous_core_gene_end_coor, return_previous_core_gene_end_coor)
self.assertEqual(expected_acc_genes_in_region, return_acc_genes_in_region)
self.assertEqual(expected_low_freq_genes_in_region, return_low_freq_genes_in_region)
self.assertEqual(expected_accessory_gene_content, return_accessory_gene_content)
self.assertEqual(expected_low_freq_gene_content, return_low_freq_gene_content)
self.assertEqual(expected_core_gene_pair_distance, return_core_gene_pair_distance)
self.assertEqual(expected_core_gene_pairs, return_core_gene_pairs)
self.assertEqual(expected_master_info, return_master_info)
class TestRecordCorelessContig(unittest.TestCase):
    """
    Tests for the function that records contigs carrying no core genes.
    """

    def test_adding_coreless_contig(self):
        """A contig with accessory/low-frequency genes but no core genes is recorded."""
        result = gff_parser.record_coreless_contig(
            {}, ['acc_1'], ['low_1'], 'gff_name', 'gff_contig_1')
        self.assertEqual({'gff_name--gff_contig_1': [['acc_1'], ['low_1']]}, result)

    def test_not_adding_coreless_contig(self):
        """A contig carrying no genes at all is not recorded."""
        result = gff_parser.record_coreless_contig(
            {}, [], [], 'gff_name', 'gff_contig_1')
        self.assertEqual({}, result)
class TestSegmentingMockGffs(unittest.TestCase):
"""
Tests for function that takes in a gff file and segments it into core-core regions
"""
    def test_single_chromosome_complete(self):
        """Complete genome, one contig: adjacent core genes are paired and the
        first and last core genes are additionally joined across the contig
        ends (pan_gene_2--pan_gene_7) rather than emitting Sequence_break pairs."""
        # Set up input
        gff_generator = [['gff_name_contig_1', '.', 'CDS', '90', '180', '.', '.', '.', 'acc_ID_1'],
                         ['gff_name_contig_1', '.', 'CDS', '179', '250', '.', '.', '.', 'Core_ID_1'],
                         ['gff_name_contig_1', '.', 'CDS', '300', '425', '.', '.', '.', 'low_freq_1'],
                         ['gff_name_contig_1', '.', 'CDS', '450', '500', '.', '.', '.', 'acc_ID_2'],
                         ['gff_name_contig_1', '.', 'CDS', '610', '680', '.', '.', '.', 'Core_ID_2'],
                         ['gff_name_contig_1', '.', 'CDS', '700', '850', '.', '.', '.', 'low_freq_2'],
                         ['gff_name_contig_1', '.', 'CDS', '950', '1000', '.', '.', '.', 'Core_ID_3'],
                         ['gff_name_contig_1', '.', 'CDS', '1100', '1250', '.', '.', '.', 'low_freq_3']]
        core_genes = {'test_single_chromosome': {'Core_ID_1': 'pan_gene_2',
                                                 'Core_ID_2': 'pan_gene_5',
                                                 'Core_ID_3': 'pan_gene_7'}}
        low_freq_genes = {'test_single_chromosome': {'low_freq_1': 'pan_gene_3',
                                                     'low_freq_2': 'pan_gene_6',
                                                     'low_freq_3': 'pan_gene_8'}}
        gff_path = 'TestSegmentingMockGffs/test_single_chromosome.gff'
        acc_genes = {'test_single_chromosome': {'acc_ID_1': 'pan_gene_1',
                                                'acc_ID_2': 'pan_gene_4'}}
        # Genome is listed as complete, so no Sequence_break pairs are expected.
        complete_genomes = ['test_single_chromosome']
        # Set up expected outputs
        core_gene_pairs = ['pan_gene_2--pan_gene_5', 'pan_gene_5--pan_gene_7', 'pan_gene_2--pan_gene_7']
        core_gene_pair_distance = {'pan_gene_2--pan_gene_5': 359,
                                   'pan_gene_5--pan_gene_7': 269,
                                   'pan_gene_2--pan_gene_7': 478}
        accessory_gene_content = {'pan_gene_2--pan_gene_5': ['pan_gene_4'],
                                  'pan_gene_5--pan_gene_7': [],
                                  'pan_gene_2--pan_gene_7': ['pan_gene_1']}
        low_freq_gene_content = {'pan_gene_2--pan_gene_5': ['pan_gene_3'],
                                 'pan_gene_5--pan_gene_7': ['pan_gene_6'],
                                 'pan_gene_2--pan_gene_7': ['pan_gene_8']}
        master_info = {'pan_gene_2--pan_gene_5--test_single_chromosome': ['test_single_chromosome', 'pan_gene_2', 'pan_gene_5', 359, 2, ['pan_gene_4'], ['pan_gene_3'], ],
                       'pan_gene_5--pan_gene_7--test_single_chromosome': ['test_single_chromosome', 'pan_gene_5', 'pan_gene_7', 269, 1, [], ['pan_gene_6']],
                       'pan_gene_2--pan_gene_7--test_single_chromosome': ['test_single_chromosome', 'pan_gene_2', 'pan_gene_7', 478, 2, ['pan_gene_1'], ['pan_gene_8']]}
        coreless_contigs = {}
        # Run function
        return_core_gene_pairs, return_core_gene_pair_distance, \
            return_accessory_gene_content, return_low_freq_gene_content, \
            return_master_info, return_coreless_contigs = gff_parser.segment_gff_content(gff_generator, core_genes,
                                                                                         low_freq_genes, gff_path,
                                                                                         acc_genes,
                                                                                         complete_genomes)
        # Evaluate
        self.assertEqual(core_gene_pairs, return_core_gene_pairs)
        self.assertEqual(core_gene_pair_distance, return_core_gene_pair_distance)
        self.assertEqual(accessory_gene_content, return_accessory_gene_content)
        self.assertEqual(low_freq_gene_content, return_low_freq_gene_content)
        self.assertEqual(master_info, return_master_info)
        self.assertEqual(coreless_contigs, return_coreless_contigs)
    def test_single_chromosome_draft(self):
        """Draft genome (not in complete_genomes), one contig: the first and last
        core genes are each paired with a Sequence_break marker instead of with
        each other."""
        # Set up input
        gff_generator = [['gff_name_contig_1', '.', 'CDS', '90', '180', '.', '.', '.', 'acc_ID_1'],
                         ['gff_name_contig_1', '.', 'CDS', '179', '250', '.', '.', '.', 'Core_ID_1'],
                         ['gff_name_contig_1', '.', 'CDS', '300', '425', '.', '.', '.', 'low_freq_1'],
                         ['gff_name_contig_1', '.', 'CDS', '450', '500', '.', '.', '.', 'acc_ID_2'],
                         ['gff_name_contig_1', '.', 'CDS', '610', '680', '.', '.', '.', 'Core_ID_2'],
                         ['gff_name_contig_1', '.', 'CDS', '700', '850', '.', '.', '.', 'low_freq_2'],
                         ['gff_name_contig_1', '.', 'CDS', '950', '1000', '.', '.', '.', 'Core_ID_3'],
                         ['gff_name_contig_1', '.', 'CDS', '1100', '1250', '.', '.', '.', 'low_freq_3']]
        core_genes = {'test_single_chromosome': {'Core_ID_1': 'pan_gene_2',
                                                 'Core_ID_2': 'pan_gene_5',
                                                 'Core_ID_3': 'pan_gene_7'}}
        low_freq_genes = {'test_single_chromosome': {'low_freq_1': 'pan_gene_3',
                                                     'low_freq_2': 'pan_gene_6',
                                                     'low_freq_3': 'pan_gene_8'}}
        gff_path = 'TestSegmentingMockGffs/test_single_chromosome.gff'
        acc_genes = {'test_single_chromosome': {'acc_ID_1': 'pan_gene_1',
                                                'acc_ID_2': 'pan_gene_4'}}
        # Empty list => genome is treated as a draft assembly.
        complete_genomes = []
        # Set up expected outputs
        core_gene_pairs = ['pan_gene_2--pan_gene_5', 'pan_gene_5--pan_gene_7', 'Sequence_break--pan_gene_2', 'pan_gene_7--Sequence_break']
        core_gene_pair_distance = {'pan_gene_2--pan_gene_5': 359,
                                   'pan_gene_5--pan_gene_7': 269,
                                   'Sequence_break--pan_gene_2': 178,
                                   'pan_gene_7--Sequence_break': 300}
        accessory_gene_content = {'pan_gene_2--pan_gene_5': ['pan_gene_4'],
                                  'pan_gene_5--pan_gene_7': [],
                                  'Sequence_break--pan_gene_2': ['pan_gene_1'],
                                  'pan_gene_7--Sequence_break': []}
        low_freq_gene_content = {'pan_gene_2--pan_gene_5': ['pan_gene_3'],
                                 'pan_gene_5--pan_gene_7': ['pan_gene_6'],
                                 'Sequence_break--pan_gene_2': [],
                                 'pan_gene_7--Sequence_break': ['pan_gene_8']}
        master_info = {
            'pan_gene_2--pan_gene_5--test_single_chromosome': ['test_single_chromosome', 'pan_gene_2', 'pan_gene_5', 359, 2, ['pan_gene_4'], ['pan_gene_3'], ],
            'pan_gene_5--pan_gene_7--test_single_chromosome': ['test_single_chromosome', 'pan_gene_5', 'pan_gene_7', 269, 1, [], ['pan_gene_6']],
            'Sequence_break--pan_gene_2--test_single_chromosome': ['test_single_chromosome', 'Sequence_break', 'pan_gene_2', 178, 1, ['pan_gene_1'], []],
            'pan_gene_7--Sequence_break--test_single_chromosome': ['test_single_chromosome', 'pan_gene_7', 'Sequence_break', 300, 1, [], ['pan_gene_8']]}
        coreless_contigs = {}
        # Run function
        return_core_gene_pairs, return_core_gene_pair_distance, \
            return_accessory_gene_content, return_low_freq_gene_content, \
            return_master_info, return_coreless_contigs = gff_parser.segment_gff_content(gff_generator, core_genes,
                                                                                         low_freq_genes, gff_path,
                                                                                         acc_genes,
                                                                                         complete_genomes)
        # Evaluate
        self.assertEqual(core_gene_pairs, return_core_gene_pairs)
        self.assertEqual(core_gene_pair_distance, return_core_gene_pair_distance)
        self.assertEqual(accessory_gene_content, return_accessory_gene_content)
        self.assertEqual(low_freq_gene_content, return_low_freq_gene_content)
        self.assertEqual(master_info, return_master_info)
        self.assertEqual(coreless_contigs, return_coreless_contigs)
    def test_two_chromosomes_complete(self):
        """Complete genome with two contigs: each contig is closed independently,
        joining its own first and last core genes; no Sequence_break pairs and
        no coreless contigs are expected."""
        # Set up input
        gff_generator = [
            # Contig 1 annotations
            ['gff_name_contig_1', '.', 'CDS', '90', '180', '.', '.', '.', 'acc_ID_1'],
            ['gff_name_contig_1', '.', 'CDS', '179', '250', '.', '.', '.', 'Core_ID_1'],
            ['gff_name_contig_1', '.', 'CDS', '300', '425', '.', '.', '.', 'low_freq_1'],
            ['gff_name_contig_1', '.', 'CDS', '450', '500', '.', '.', '.', 'acc_ID_2'],
            ['gff_name_contig_1', '.', 'CDS', '610', '680', '.', '.', '.', 'Core_ID_2'],
            ['gff_name_contig_1', '.', 'CDS', '700', '850', '.', '.', '.', 'low_freq_2'],
            ['gff_name_contig_1', '.', 'CDS', '950', '1000', '.', '.', '.', 'Core_ID_3'],
            ['gff_name_contig_1', '.', 'CDS', '1100', '1250', '.', '.', '.', 'low_freq_3'],
            # Contig 2 annotations
            ['gff_name_contig_2', '.', 'CDS', '90', '180', '.', '.', '.', 'acc_ID_3'],
            ['gff_name_contig_2', '.', 'CDS', '179', '250', '.', '.', '.', 'Core_ID_4'],
            ['gff_name_contig_2', '.', 'CDS', '300', '425', '.', '.', '.', 'low_freq_4'],
            ['gff_name_contig_2', '.', 'CDS', '450', '500', '.', '.', '.', 'acc_ID_4'],
            ['gff_name_contig_2', '.', 'CDS', '610', '680', '.', '.', '.', 'Core_ID_5'],
            ['gff_name_contig_2', '.', 'CDS', '700', '850', '.', '.', '.', 'low_freq_5'],
            ['gff_name_contig_2', '.', 'CDS', '950', '1000', '.', '.', '.', 'Core_ID_6'],
            ['gff_name_contig_2', '.', 'CDS', '1100', '1250', '.', '.', '.', 'low_freq_6']
        ]
        core_genes = {'test_double_chromosome': {'Core_ID_1': 'pan_gene_2',
                                                 'Core_ID_2': 'pan_gene_5',
                                                 'Core_ID_3': 'pan_gene_7',
                                                 'Core_ID_4': 'pan_gene_10',
                                                 'Core_ID_5': 'pan_gene_13',
                                                 'Core_ID_6': 'pan_gene_15'
                                                 }}
        low_freq_genes = {'test_double_chromosome': {'low_freq_1': 'pan_gene_3',
                                                     'low_freq_2': 'pan_gene_6',
                                                     'low_freq_3': 'pan_gene_8',
                                                     'low_freq_4': 'pan_gene_11',
                                                     'low_freq_5': 'pan_gene_14',
                                                     'low_freq_6': 'pan_gene_16'}}
        gff_path = 'TestSegmentingMockGffs/test_double_chromosome.gff'
        acc_genes = {'test_double_chromosome': {'acc_ID_1': 'pan_gene_1',
                                                'acc_ID_2': 'pan_gene_4',
                                                'acc_ID_3': 'pan_gene_9',
                                                'acc_ID_4': 'pan_gene_12'}}
        # Genome is listed as complete, so both contigs wrap around.
        complete_genomes = ['test_double_chromosome']
        # Set up expected outputs
        core_gene_pairs = ['pan_gene_2--pan_gene_5', 'pan_gene_5--pan_gene_7', 'pan_gene_2--pan_gene_7',
                           'pan_gene_10--pan_gene_13', 'pan_gene_13--pan_gene_15', 'pan_gene_10--pan_gene_15']
        core_gene_pair_distance = {'pan_gene_2--pan_gene_5': 359,
                                   'pan_gene_5--pan_gene_7': 269,
                                   'pan_gene_2--pan_gene_7': 478,
                                   'pan_gene_10--pan_gene_13': 359,
                                   'pan_gene_13--pan_gene_15': 269,
                                   'pan_gene_10--pan_gene_15': 478}
        accessory_gene_content = {'pan_gene_2--pan_gene_5': ['pan_gene_4'],
                                  'pan_gene_5--pan_gene_7': [],
                                  'pan_gene_2--pan_gene_7': ['pan_gene_1'],
                                  'pan_gene_10--pan_gene_13': ['pan_gene_12'],
                                  'pan_gene_13--pan_gene_15': [],
                                  'pan_gene_10--pan_gene_15': ['pan_gene_9']
                                  }
        low_freq_gene_content = {'pan_gene_2--pan_gene_5': ['pan_gene_3'],
                                 'pan_gene_5--pan_gene_7': ['pan_gene_6'],
                                 'pan_gene_2--pan_gene_7': ['pan_gene_8'],
                                 'pan_gene_10--pan_gene_13': ['pan_gene_11'],
                                 'pan_gene_13--pan_gene_15': ['pan_gene_14'],
                                 'pan_gene_10--pan_gene_15': ['pan_gene_16'],
                                 }
        master_info = {
            'pan_gene_2--pan_gene_5--test_double_chromosome': ['test_double_chromosome', 'pan_gene_2', 'pan_gene_5', 359, 2, ['pan_gene_4'], ['pan_gene_3'], ],
            'pan_gene_5--pan_gene_7--test_double_chromosome': ['test_double_chromosome', 'pan_gene_5', 'pan_gene_7', 269, 1, [], ['pan_gene_6']],
            'pan_gene_2--pan_gene_7--test_double_chromosome': ['test_double_chromosome', 'pan_gene_2', 'pan_gene_7', 478, 2, ['pan_gene_1'], ['pan_gene_8']],
            'pan_gene_10--pan_gene_13--test_double_chromosome': ['test_double_chromosome', 'pan_gene_10', 'pan_gene_13', 359, 2, ['pan_gene_12'], ['pan_gene_11']],
            'pan_gene_13--pan_gene_15--test_double_chromosome': ['test_double_chromosome', 'pan_gene_13', 'pan_gene_15', 269, 1, [], ['pan_gene_14']],
            'pan_gene_10--pan_gene_15--test_double_chromosome': ['test_double_chromosome', 'pan_gene_10', 'pan_gene_15', 478, 2, ['pan_gene_9'], ['pan_gene_16']]
        }
        coreless_contigs = {}
        # Run function
        return_core_gene_pairs, return_core_gene_pair_distance, \
            return_accessory_gene_content, return_low_freq_gene_content, \
            return_master_info, return_coreless_contigs = gff_parser.segment_gff_content(gff_generator, core_genes,
                                                                                         low_freq_genes, gff_path,
                                                                                         acc_genes,
                                                                                         complete_genomes)
        # Evaluate
        self.assertEqual(core_gene_pairs, return_core_gene_pairs)
        self.assertEqual(core_gene_pair_distance, return_core_gene_pair_distance)
        self.assertEqual(accessory_gene_content, return_accessory_gene_content)
        self.assertEqual(low_freq_gene_content, return_low_freq_gene_content)
        self.assertEqual(master_info, return_master_info)
        self.assertEqual(coreless_contigs, return_coreless_contigs)
def test_two_daft_contigs(self):
# Set up input
gff_generator = [
# Contig 1 annotations
['gff_name_contig_1', '.', 'CDS', '90', '180', '.', '.', '.', 'acc_ID_1'],
['gff_name_contig_1', '.', 'CDS', '179', '250', '.', '.', '.', 'Core_ID_1'],
['gff_name_contig_1', '.', 'CDS', '300', '425', '.', '.', '.', 'low_freq_1'],
['gff_name_contig_1', '.', 'CDS', '450', '500', '.', '.', '.', 'acc_ID_2'],
['gff_name_contig_1', '.', 'CDS', '610', '680', '.', '.', '.', 'Core_ID_2'],
['gff_name_contig_1', '.', 'CDS', '700', '850', '.', '.', '.', 'low_freq_2'],
['gff_name_contig_1', '.', 'CDS', '950', '1000', '.', '.', '.', 'Core_ID_3'],
['gff_name_contig_1', '.', 'CDS', '1100', '1250', '.', '.', '.', 'low_freq_3'],
# Contig 2 annotations
['gff_name_contig_2', '.', 'CDS', '179', '250', '.', '.', '.', 'Core_ID_4'],
['gff_name_contig_2', '.', 'CDS', '300', '425', '.', '.', '.', 'low_freq_4'],
['gff_name_contig_2', '.', 'CDS', '450', '500', '.', '.', '.', 'acc_ID_4'],
['gff_name_contig_2', '.', 'CDS', '610', '680', '.', '.', '.', 'Core_ID_5'],
['gff_name_contig_2', '.', 'CDS', '700', '850', '.', '.', '.', 'low_freq_5'],
['gff_name_contig_2', '.', 'CDS', '950', '1000', '.', '.', '.', 'Core_ID_6'],
]
core_genes = {'test_double_chromosome': {'Core_ID_1': 'pan_gene_2',
'Core_ID_2': 'pan_gene_5',
'Core_ID_3': 'pan_gene_7',
'Core_ID_4': 'pan_gene_10',
'Core_ID_5': 'pan_gene_13',
'Core_ID_6': 'pan_gene_15'
}}
low_freq_genes = {'test_double_chromosome': {'low_freq_1': 'pan_gene_3',
'low_freq_2': 'pan_gene_6',
'low_freq_3': 'pan_gene_8',
'low_freq_4': 'pan_gene_11',
'low_freq_5': 'pan_gene_14'}}
gff_path = 'TestSegmentingMockGffs/test_double_chromosome.gff'
acc_genes = {'test_double_chromosome': {'acc_ID_1': 'pan_gene_1',
'acc_ID_2': 'pan_gene_4',
'acc_ID_4': 'pan_gene_12'}}
complete_genomes = []
# Set up expected outputs
core_gene_pairs = ['pan_gene_2--pan_gene_5', 'pan_gene_5--pan_gene_7',
'Sequence_break--pan_gene_2', 'pan_gene_7--Sequence_break',
'pan_gene_10--pan_gene_13', 'pan_gene_13--pan_gene_15',
'Sequence_break--pan_gene_10', 'pan_gene_15--Sequence_break']
core_gene_pair_distance = {'pan_gene_2--pan_gene_5': 359,
'pan_gene_5--pan_gene_7': 269,
'Sequence_break--pan_gene_2': 178,
'pan_gene_7--Sequence_break': 300,
'pan_gene_10--pan_gene_13': 359,
'pan_gene_13--pan_gene_15': 269,
'Sequence_break--pan_gene_10': 178,
'pan_gene_15--Sequence_break': 300}
accessory_gene_content = {'pan_gene_2--pan_gene_5': ['pan_gene_4'],
'pan_gene_5--pan_gene_7': [],
'Sequence_break--pan_gene_2': ['pan_gene_1'],
'pan_gene_7--Sequence_break': [],
'pan_gene_10--pan_gene_13': ['pan_gene_12'],
'pan_gene_13--pan_gene_15': [],
'Sequence_break--pan_gene_10': [],
'pan_gene_15--Sequence_break': []
}
low_freq_gene_content = {'pan_gene_2--pan_gene_5': ['pan_gene_3'],
'pan_gene_5--pan_gene_7': ['pan_gene_6'],
'Sequence_break--pan_gene_2': [],
'pan_gene_7--Sequence_break': ['pan_gene_8'],
'pan_gene_10--pan_gene_13': ['pan_gene_11'],
'pan_gene_13--pan_gene_15': ['pan_gene_14'],
'Sequence_break--pan_gene_10': [],
'pan_gene_15--Sequence_break': []
}
master_info = {
'pan_gene_2--pan_gene_5--test_double_chromosome': ['test_double_chromosome', 'pan_gene_2', 'pan_gene_5',
359, 2, ['pan_gene_4'], ['pan_gene_3'], ],
'pan_gene_5--pan_gene_7--test_double_chromosome': ['test_double_chromosome', 'pan_gene_5', 'pan_gene_7',
269, 1, [], ['pan_gene_6']],
'Sequence_break--pan_gene_2--test_double_chromosome': ['test_double_chromosome', 'Sequence_break', 'pan_gene_2', 178, 1, ['pan_gene_1'], []],
'pan_gene_7--Sequence_break--test_double_chromosome': ['test_double_chromosome', 'pan_gene_7', 'Sequence_break', 300, 1, [], ['pan_gene_8']],
'pan_gene_10--pan_gene_13--test_double_chromosome': ['test_double_chromosome', 'pan_gene_10', 'pan_gene_13',
359, 2, ['pan_gene_12'], ['pan_gene_11']],
'pan_gene_13--pan_gene_15--test_double_chromosome': ['test_double_chromosome', 'pan_gene_13', 'pan_gene_15',
269, 1, [], ['pan_gene_14']],
'Sequence_break--pan_gene_10--test_double_chromosome': ['test_double_chromosome', 'Sequence_break', 'pan_gene_10', 178, 0, [], []],
'pan_gene_15--Sequence_break--test_double_chromosome': ['test_double_chromosome', 'pan_gene_15', 'Sequence_break', 300, 0, [], []]
}
coreless_contigs = {}
# Run function
return_core_gene_pairs, return_core_gene_pair_distance, \
return_accessory_gene_content, return_low_freq_gene_content, \
return_master_info, return_coreless_contigs = gff_parser.segment_gff_content(gff_generator, core_genes,
low_freq_genes, gff_path,
acc_genes,
complete_genomes)
# Evaluate
self.assertEqual(core_gene_pairs.sort(), return_core_gene_pairs.sort())
self.assertEqual(core_gene_pair_distance, return_core_gene_pair_distance)
self.assertEqual(accessory_gene_content, return_accessory_gene_content)
self.assertEqual(low_freq_gene_content, return_low_freq_gene_content)
self.assertEqual(master_info, return_master_info)
self.assertEqual(coreless_contigs, return_coreless_contigs)
def test_with_coreless_contig_draft_last_contig(self):
# Set up input
gff_generator = [
# Contig 1 annotations
['gff_name_contig_1', '.', 'CDS', '90', '180', '.', '.', '.', 'acc_ID_1'],
['gff_name_contig_1', '.', 'CDS', '179', '250', '.', '.', '.', 'Core_ID_1'],
['gff_name_contig_1', '.', 'CDS', '300', '425', '.', '.', '.', 'low_freq_1'],
['gff_name_contig_1', '.', 'CDS', '450', '500', '.', '.', '.', 'acc_ID_2'],
['gff_name_contig_1', '.', 'CDS', '610', '680', '.', '.', '.', 'Core_ID_2'],
['gff_name_contig_1', '.', 'CDS', '700', '850', '.', '.', '.', 'low_freq_2'],
['gff_name_contig_1', '.', 'CDS', '950', '1000', '.', '.', '.', 'Core_ID_3'],
['gff_name_contig_1', '.', 'CDS', '1100', '1250', '.', '.', '.', 'low_freq_3'],
# Contig 2 annotations
['gff_name_contig_2', '.', 'CDS', '300', '425', '.', '.', '.', 'low_freq_4'],
['gff_name_contig_2', '.', 'CDS', '450', '500', '.', '.', '.', 'acc_ID_4'],
['gff_name_contig_2', '.', 'CDS', '700', '850', '.', '.', '.', 'low_freq_5'],
]
core_genes = {'test_double_chromosome': {'Core_ID_1': 'pan_gene_2',
'Core_ID_2': 'pan_gene_5',
'Core_ID_3': 'pan_gene_7'}}
low_freq_genes = {'test_double_chromosome': {'low_freq_1': 'pan_gene_3',
'low_freq_2': 'pan_gene_6',
'low_freq_3': 'pan_gene_8',
'low_freq_4': 'pan_gene_11',
'low_freq_5': 'pan_gene_14'}}
gff_path = 'TestSegmentingMockGffs/test_double_chromosome.gff'
acc_genes = {'test_double_chromosome': {'acc_ID_1': 'pan_gene_1',
'acc_ID_2': 'pan_gene_4',
'acc_ID_4': 'pan_gene_12'}}
complete_genomes = []
# Set up expected outputs
core_gene_pairs = ['pan_gene_2--pan_gene_5', 'pan_gene_5--pan_gene_7',
'Sequence_break--pan_gene_2', 'pan_gene_7--Sequence_break']
core_gene_pair_distance = {'pan_gene_2--pan_gene_5': 359,
'pan_gene_5--pan_gene_7': 269,
'Sequence_break--pan_gene_2': 178,
'pan_gene_7--Sequence_break': 300}
accessory_gene_content = {'pan_gene_2--pan_gene_5': ['pan_gene_4'],
'pan_gene_5--pan_gene_7': [],
'Sequence_break--pan_gene_2': ['pan_gene_1'],
'pan_gene_7--Sequence_break': []
}
low_freq_gene_content = {'pan_gene_2--pan_gene_5': ['pan_gene_3'],
'pan_gene_5--pan_gene_7': ['pan_gene_6'],
'Sequence_break--pan_gene_2': [],
'pan_gene_7--Sequence_break': ['pan_gene_8']
}
master_info = {
'pan_gene_2--pan_gene_5--test_double_chromosome': ['test_double_chromosome', 'pan_gene_2', 'pan_gene_5',
359, 2, ['pan_gene_4'], ['pan_gene_3'], ],
'pan_gene_5--pan_gene_7--test_double_chromosome': ['test_double_chromosome', 'pan_gene_5', 'pan_gene_7',
269, 1, [], ['pan_gene_6']],
'Sequence_break--pan_gene_2--test_double_chromosome': ['test_double_chromosome', 'Sequence_break',
'pan_gene_2', 178, 1, ['pan_gene_1'], []],
'pan_gene_7--Sequence_break--test_double_chromosome': ['test_double_chromosome', 'pan_gene_7',
'Sequence_break', 300, 1, [], ['pan_gene_8']]
}
coreless_contigs = {'test_double_chromosome--gff_name_contig_2': [['pan_gene_12'], ['pan_gene_11', 'pan_gene_14']]}
# Run function
return_core_gene_pairs, return_core_gene_pair_distance, \
return_accessory_gene_content, return_low_freq_gene_content, \
return_master_info, return_coreless_contigs = gff_parser.segment_gff_content(gff_generator, core_genes,
low_freq_genes, gff_path,
acc_genes,
complete_genomes)
# Evaluate
self.assertEqual(core_gene_pairs.sort(), return_core_gene_pairs.sort())
self.assertEqual(core_gene_pair_distance, return_core_gene_pair_distance)
self.assertEqual(accessory_gene_content, return_accessory_gene_content)
self.assertEqual(low_freq_gene_content, return_low_freq_gene_content)
self.assertEqual(master_info, return_master_info)
self.assertEqual(coreless_contigs, return_coreless_contigs)
def test_with_coreless_contig_complete_last_contig(self):
# Set up input
gff_generator = [
# Contig 1 annotations
['gff_name_contig_1', '.', 'CDS', '90', '180', '.', '.', '.', 'acc_ID_1'],
['gff_name_contig_1', '.', 'CDS', '179', '250', '.', '.', '.', 'Core_ID_1'],
['gff_name_contig_1', '.', 'CDS', '300', '425', '.', '.', '.', 'low_freq_1'],
['gff_name_contig_1', '.', 'CDS', '450', '500', '.', '.', '.', 'acc_ID_2'],
['gff_name_contig_1', '.', 'CDS', '610', '680', '.', '.', '.', 'Core_ID_2'],
['gff_name_contig_1', '.', 'CDS', '700', '850', '.', '.', '.', 'low_freq_2'],
['gff_name_contig_1', '.', 'CDS', '950', '1000', '.', '.', '.', 'Core_ID_3'],
['gff_name_contig_1', '.', 'CDS', '1100', '1250', '.', '.', '.', 'low_freq_3'],
# Contig 2 annotations
['gff_name_contig_2', '.', 'CDS', '300', '425', '.', '.', '.', 'low_freq_4'],
['gff_name_contig_2', '.', 'CDS', '450', '500', '.', '.', '.', 'acc_ID_4'],
['gff_name_contig_2', '.', 'CDS', '700', '850', '.', '.', '.', 'low_freq_5'],
]
core_genes = {'test_double_chromosome': {'Core_ID_1': 'pan_gene_2',
'Core_ID_2': 'pan_gene_5',
'Core_ID_3': 'pan_gene_7'}}
low_freq_genes = {'test_double_chromosome': {'low_freq_1': 'pan_gene_3',
'low_freq_2': 'pan_gene_6',
'low_freq_3': 'pan_gene_8',
'low_freq_4': 'pan_gene_11',
'low_freq_5': 'pan_gene_14'}}
gff_path = 'TestSegmentingMockGffs/test_double_chromosome.gff'
acc_genes = {'test_double_chromosome': {'acc_ID_1': 'pan_gene_1',
'acc_ID_2': 'pan_gene_4',
'acc_ID_4': 'pan_gene_12'}}
complete_genomes = ['test_double_chromosome']
# Set up expected outputs
core_gene_pairs = ['pan_gene_2--pan_gene_5', 'pan_gene_5--pan_gene_7',
'pan_gene_2--pan_gene_7']
core_gene_pair_distance = {'pan_gene_2--pan_gene_5': 359,
'pan_gene_5--pan_gene_7': 269,
'pan_gene_2--pan_gene_7': 478}
accessory_gene_content = {'pan_gene_2--pan_gene_5': ['pan_gene_4'],
'pan_gene_5--pan_gene_7': [],
'pan_gene_2--pan_gene_7': ['pan_gene_1']
}
low_freq_gene_content = {'pan_gene_2--pan_gene_5': ['pan_gene_3'],
'pan_gene_5--pan_gene_7': ['pan_gene_6'],
'pan_gene_2--pan_gene_7': ['pan_gene_8']
}
master_info = {
'pan_gene_2--pan_gene_5--test_double_chromosome': ['test_double_chromosome', 'pan_gene_2', 'pan_gene_5',
359, 2, ['pan_gene_4'], ['pan_gene_3'], ],
'pan_gene_5--pan_gene_7--test_double_chromosome': ['test_double_chromosome', 'pan_gene_5', 'pan_gene_7',
269, 1, [], ['pan_gene_6']],
'pan_gene_2--pan_gene_7--test_double_chromosome': ['test_double_chromosome', 'pan_gene_2', 'pan_gene_7', 478, 2, ['pan_gene_1'], ['pan_gene_8']]
}
coreless_contigs = {
'test_double_chromosome--gff_name_contig_2': [['pan_gene_12'], ['pan_gene_11', 'pan_gene_14']]}
# Run function
return_core_gene_pairs, return_core_gene_pair_distance, \
return_accessory_gene_content, return_low_freq_gene_content, \
return_master_info, return_coreless_contigs = gff_parser.segment_gff_content(gff_generator, core_genes,
low_freq_genes, gff_path,
acc_genes,
complete_genomes)
# Evaluate
self.assertEqual(core_gene_pairs.sort(), return_core_gene_pairs.sort())
self.assertEqual(core_gene_pair_distance, return_core_gene_pair_distance)
self.assertEqual(accessory_gene_content, return_accessory_gene_content)
self.assertEqual(low_freq_gene_content, return_low_freq_gene_content)
self.assertEqual(master_info, return_master_info)
self.assertEqual(coreless_contigs, return_coreless_contigs)
def test_with_coreless_contig_draft_first_contig(self):
    """A coreless contig that comes FIRST in a draft genome is recorded in
    coreless_contigs, while core genes on the following contig pair up with
    Sequence_break markers at the contig edges."""
    # Set up input mock GFF rows:
    # [contig, source, feature, start, end, score, strand, frame, gene ID]
    gff_generator = [
        # Contig 1 annotations (carries no core genes)
        ['gff_name_contig_1', '.', 'CDS', '300', '425', '.', '.', '.', 'low_freq_4'],
        ['gff_name_contig_1', '.', 'CDS', '450', '500', '.', '.', '.', 'acc_ID_4'],
        ['gff_name_contig_1', '.', 'CDS', '700', '850', '.', '.', '.', 'low_freq_5'],
        # Contig 2 annotations
        ['gff_name_contig_2', '.', 'CDS', '90', '180', '.', '.', '.', 'acc_ID_1'],
        ['gff_name_contig_2', '.', 'CDS', '179', '250', '.', '.', '.', 'Core_ID_1'],
        ['gff_name_contig_2', '.', 'CDS', '300', '425', '.', '.', '.', 'low_freq_1'],
        ['gff_name_contig_2', '.', 'CDS', '450', '500', '.', '.', '.', 'acc_ID_2'],
        ['gff_name_contig_2', '.', 'CDS', '610', '680', '.', '.', '.', 'Core_ID_2'],
        ['gff_name_contig_2', '.', 'CDS', '700', '850', '.', '.', '.', 'low_freq_2'],
        ['gff_name_contig_2', '.', 'CDS', '950', '1000', '.', '.', '.', 'Core_ID_3'],
        ['gff_name_contig_2', '.', 'CDS', '1100', '1250', '.', '.', '.', 'low_freq_3']
    ]
    core_genes = {'test_double_chromosome': {'Core_ID_1': 'pan_gene_2',
                                             'Core_ID_2': 'pan_gene_5',
                                             'Core_ID_3': 'pan_gene_7'}}
    low_freq_genes = {'test_double_chromosome': {'low_freq_1': 'pan_gene_3',
                                                 'low_freq_2': 'pan_gene_6',
                                                 'low_freq_3': 'pan_gene_8',
                                                 'low_freq_4': 'pan_gene_11',
                                                 'low_freq_5': 'pan_gene_14'}}
    gff_path = 'TestSegmentingMockGffs/test_double_chromosome.gff'
    acc_genes = {'test_double_chromosome': {'acc_ID_1': 'pan_gene_1',
                                            'acc_ID_2': 'pan_gene_4',
                                            'acc_ID_4': 'pan_gene_12'}}
    # Empty list => the genome is a draft, so contig ends become
    # Sequence_break markers instead of wrapping around.
    complete_genomes = []
    # Set up expected outputs
    core_gene_pairs = ['pan_gene_2--pan_gene_5', 'pan_gene_5--pan_gene_7',
                       'Sequence_break--pan_gene_2', 'pan_gene_7--Sequence_break']
    core_gene_pair_distance = {'pan_gene_2--pan_gene_5': 359,
                               'pan_gene_5--pan_gene_7': 269,
                               'Sequence_break--pan_gene_2': 178,
                               'pan_gene_7--Sequence_break': 300}
    accessory_gene_content = {'pan_gene_2--pan_gene_5': ['pan_gene_4'],
                              'pan_gene_5--pan_gene_7': [],
                              'Sequence_break--pan_gene_2': ['pan_gene_1'],
                              'pan_gene_7--Sequence_break': []}
    low_freq_gene_content = {'pan_gene_2--pan_gene_5': ['pan_gene_3'],
                             'pan_gene_5--pan_gene_7': ['pan_gene_6'],
                             'Sequence_break--pan_gene_2': [],
                             'pan_gene_7--Sequence_break': ['pan_gene_8']}
    master_info = {
        'pan_gene_2--pan_gene_5--test_double_chromosome': ['test_double_chromosome', 'pan_gene_2', 'pan_gene_5',
                                                           359, 2, ['pan_gene_4'], ['pan_gene_3']],
        'pan_gene_5--pan_gene_7--test_double_chromosome': ['test_double_chromosome', 'pan_gene_5', 'pan_gene_7',
                                                           269, 1, [], ['pan_gene_6']],
        'Sequence_break--pan_gene_2--test_double_chromosome': ['test_double_chromosome', 'Sequence_break',
                                                               'pan_gene_2', 178, 1, ['pan_gene_1'], []],
        'pan_gene_7--Sequence_break--test_double_chromosome': ['test_double_chromosome', 'pan_gene_7',
                                                               'Sequence_break', 300, 1, [], ['pan_gene_8']]
    }
    coreless_contigs = {
        'test_double_chromosome--gff_name_contig_1': [['pan_gene_12'], ['pan_gene_11', 'pan_gene_14']]}
    # Run function
    return_core_gene_pairs, return_core_gene_pair_distance, \
        return_accessory_gene_content, return_low_freq_gene_content, \
        return_master_info, return_coreless_contigs = gff_parser.segment_gff_content(
            gff_generator, core_genes, low_freq_genes, gff_path, acc_genes,
            complete_genomes)
    # Evaluate.
    # BUG FIX: list.sort() sorts in place and returns None, so the previous
    # assertEqual(a.sort(), b.sort()) compared None to None and could never
    # fail. Compare sorted copies instead.
    self.assertEqual(sorted(core_gene_pairs), sorted(return_core_gene_pairs))
    self.assertEqual(core_gene_pair_distance, return_core_gene_pair_distance)
    self.assertEqual(accessory_gene_content, return_accessory_gene_content)
    self.assertEqual(low_freq_gene_content, return_low_freq_gene_content)
    self.assertEqual(master_info, return_master_info)
    self.assertEqual(coreless_contigs, return_coreless_contigs)
def test_with_coreless_contig_middle_contig(self):
    """A coreless contig in the MIDDLE of a draft genome is recorded in
    coreless_contigs, while the flanking contigs produce Sequence_break
    core-gene pairs."""
    # Set up input mock GFF rows:
    # [contig, source, feature, start, end, score, strand, frame, gene ID]
    gff_generator = [
        # Contig 1 annotations
        ['gff_name_contig_1', '.', 'CDS', '90', '180', '.', '.', '.', 'acc_ID_1'],
        ['gff_name_contig_1', '.', 'CDS', '179', '250', '.', '.', '.', 'Core_ID_1'],
        ['gff_name_contig_1', '.', 'CDS', '300', '425', '.', '.', '.', 'low_freq_1'],
        ['gff_name_contig_1', '.', 'CDS', '450', '500', '.', '.', '.', 'acc_ID_2'],
        ['gff_name_contig_1', '.', 'CDS', '610', '680', '.', '.', '.', 'Core_ID_2'],
        ['gff_name_contig_1', '.', 'CDS', '700', '850', '.', '.', '.', 'low_freq_2'],
        ['gff_name_contig_1', '.', 'CDS', '950', '1000', '.', '.', '.', 'Core_ID_3'],
        ['gff_name_contig_1', '.', 'CDS', '1100', '1250', '.', '.', '.', 'low_freq_3'],
        # Contig 2 annotations (carries no core genes)
        ['gff_name_contig_2', '.', 'CDS', '300', '425', '.', '.', '.', 'low_freq_4'],
        ['gff_name_contig_2', '.', 'CDS', '450', '500', '.', '.', '.', 'acc_ID_4'],
        ['gff_name_contig_2', '.', 'CDS', '700', '850', '.', '.', '.', 'low_freq_5'],
        # Contig 3 annotations
        ['gff_name_contig_3', '.', 'CDS', '179', '250', '.', '.', '.', 'Core_ID_4'],
        ['gff_name_contig_3', '.', 'CDS', '610', '680', '.', '.', '.', 'Core_ID_5']
    ]
    core_genes = {'test_triple_chromosome': {'Core_ID_1': 'pan_gene_2',
                                             'Core_ID_2': 'pan_gene_5',
                                             'Core_ID_3': 'pan_gene_7',
                                             'Core_ID_4': 'pan_gene_10',
                                             'Core_ID_5': 'pan_gene_13'}}
    low_freq_genes = {'test_triple_chromosome': {'low_freq_1': 'pan_gene_3',
                                                 'low_freq_2': 'pan_gene_6',
                                                 'low_freq_3': 'pan_gene_8',
                                                 'low_freq_4': 'pan_gene_11',
                                                 'low_freq_5': 'pan_gene_14'}}
    gff_path = 'TestSegmentingMockGffs/test_triple_chromosome.gff'
    acc_genes = {'test_triple_chromosome': {'acc_ID_1': 'pan_gene_1',
                                            'acc_ID_2': 'pan_gene_4',
                                            'acc_ID_4': 'pan_gene_12'}}
    complete_genomes = []  # draft genome: contig ends are sequence breaks
    # Construct expected results
    core_gene_pairs = ['pan_gene_2--pan_gene_5', 'pan_gene_5--pan_gene_7',
                       'Sequence_break--pan_gene_2', 'pan_gene_7--Sequence_break',
                       'Sequence_break--pan_gene_10', 'pan_gene_10--pan_gene_13', 'pan_gene_13--Sequence_break']
    core_gene_pair_distance = {'pan_gene_2--pan_gene_5': 359,
                               'pan_gene_5--pan_gene_7': 269,
                               'Sequence_break--pan_gene_2': 178,
                               'pan_gene_7--Sequence_break': 300,
                               'Sequence_break--pan_gene_10': 178,
                               'pan_gene_10--pan_gene_13': 359,
                               'pan_gene_13--Sequence_break': 620}
    accessory_gene_content = {'pan_gene_2--pan_gene_5': ['pan_gene_4'],
                              'pan_gene_5--pan_gene_7': [],
                              'Sequence_break--pan_gene_2': ['pan_gene_1'],
                              'pan_gene_7--Sequence_break': [],
                              'Sequence_break--pan_gene_10': [],
                              'pan_gene_10--pan_gene_13': [],
                              'pan_gene_13--Sequence_break': []}
    low_freq_gene_content = {'pan_gene_2--pan_gene_5': ['pan_gene_3'],
                             'pan_gene_5--pan_gene_7': ['pan_gene_6'],
                             'Sequence_break--pan_gene_2': [],
                             'pan_gene_7--Sequence_break': ['pan_gene_8'],
                             'Sequence_break--pan_gene_10': [],
                             'pan_gene_10--pan_gene_13': [],
                             'pan_gene_13--Sequence_break': []}
    master_info = {
        'pan_gene_2--pan_gene_5--test_triple_chromosome': ['test_triple_chromosome', 'pan_gene_2', 'pan_gene_5',
                                                           359, 2, ['pan_gene_4'], ['pan_gene_3']],
        'pan_gene_5--pan_gene_7--test_triple_chromosome': ['test_triple_chromosome', 'pan_gene_5', 'pan_gene_7',
                                                           269, 1, [], ['pan_gene_6']],
        'Sequence_break--pan_gene_2--test_triple_chromosome': ['test_triple_chromosome', 'Sequence_break',
                                                               'pan_gene_2', 178, 1, ['pan_gene_1'], []],
        'pan_gene_7--Sequence_break--test_triple_chromosome': ['test_triple_chromosome', 'pan_gene_7',
                                                               'Sequence_break', 300, 1, [], ['pan_gene_8']],
        'Sequence_break--pan_gene_10--test_triple_chromosome': ['test_triple_chromosome', 'Sequence_break', 'pan_gene_10', 178, 0, [], []],
        'pan_gene_10--pan_gene_13--test_triple_chromosome': ['test_triple_chromosome', 'pan_gene_10', 'pan_gene_13', 359, 0, [], []],
        'pan_gene_13--Sequence_break--test_triple_chromosome': ['test_triple_chromosome', 'pan_gene_13', 'Sequence_break', 620, 0, [], []]
    }
    coreless_contigs = {
        'test_triple_chromosome--gff_name_contig_2': [['pan_gene_12'], ['pan_gene_11', 'pan_gene_14']]}
    # Run function
    return_core_gene_pairs, return_core_gene_pair_distance, \
        return_accessory_gene_content, return_low_freq_gene_content, \
        return_master_info, return_coreless_contigs = gff_parser.segment_gff_content(
            gff_generator, core_genes, low_freq_genes, gff_path, acc_genes,
            complete_genomes)
    # Sort expected and returned lists inside the dicts so the comparisons
    # below are order-insensitive.
    # BUG FIX: 'element is list' tests identity against the list *type* and
    # is always False, so nothing was ever sorted; use isinstance instead.
    low_freq_gene_content = {key: sorted(val) for key, val in low_freq_gene_content.items()}
    accessory_gene_content = {key: sorted(val) for key, val in accessory_gene_content.items()}
    master_info = {key: [sorted(element) if isinstance(element, list) else element
                         for element in val]
                   for key, val in master_info.items()}
    return_low_freq_gene_content = {key: sorted(val) for key, val in return_low_freq_gene_content.items()}
    return_accessory_gene_content = {key: sorted(val) for key, val in return_accessory_gene_content.items()}
    return_master_info = {key: [sorted(element) if isinstance(element, list) else element
                                for element in val]
                          for key, val in return_master_info.items()}
    # Evaluate.
    # BUG FIX: list.sort() returns None, so assertEqual(a.sort(), b.sort())
    # compared None to None; compare sorted copies instead.
    self.assertEqual(sorted(core_gene_pairs), sorted(return_core_gene_pairs))
    self.assertEqual(core_gene_pair_distance, return_core_gene_pair_distance)
    self.assertEqual(accessory_gene_content, return_accessory_gene_content)
    self.assertEqual(low_freq_gene_content, return_low_freq_gene_content)
    self.assertEqual(master_info, return_master_info)
    self.assertEqual(coreless_contigs, return_coreless_contigs)
def test_single_core_on_contig(self):
    """A contig of a complete genome carrying a single core gene yields a
    self-pair (pan_gene_15--pan_gene_15) that wraps around the contig and
    collects all of the contig's accessory/low-frequency genes."""
    # Set up input mock GFF rows:
    # [contig, source, feature, start, end, score, strand, frame, gene ID]
    gff_generator = [
        # Contig 1 annotations
        ['gff_name_contig_1', '.', 'CDS', '90', '180', '.', '.', '.', 'acc_ID_1'],
        ['gff_name_contig_1', '.', 'CDS', '179', '250', '.', '.', '.', 'Core_ID_1'],
        ['gff_name_contig_1', '.', 'CDS', '300', '425', '.', '.', '.', 'low_freq_1'],
        ['gff_name_contig_1', '.', 'CDS', '450', '500', '.', '.', '.', 'acc_ID_2'],
        ['gff_name_contig_1', '.', 'CDS', '610', '680', '.', '.', '.', 'Core_ID_2'],
        ['gff_name_contig_1', '.', 'CDS', '700', '850', '.', '.', '.', 'low_freq_2'],
        ['gff_name_contig_1', '.', 'CDS', '950', '1000', '.', '.', '.', 'Core_ID_3'],
        ['gff_name_contig_1', '.', 'CDS', '1100', '1250', '.', '.', '.', 'low_freq_3'],
        # Contig 2 annotations (single core gene)
        ['gff_name_contig_2', '.', 'CDS', '300', '425', '.', '.', '.', 'low_freq_4'],
        ['gff_name_contig_2', '.', 'CDS', '450', '500', '.', '.', '.', 'acc_ID_4'],
        ['gff_name_contig_2', '.', 'CDS', '700', '850', '.', '.', '.', 'low_freq_5'],
        ['gff_name_contig_2', '.', 'CDS', '950', '1000', '.', '.', '.', 'Core_ID_4']
    ]
    core_genes = {'test_double_chromosome': {'Core_ID_1': 'pan_gene_2',
                                             'Core_ID_2': 'pan_gene_5',
                                             'Core_ID_3': 'pan_gene_7',
                                             'Core_ID_4': 'pan_gene_15'}}
    low_freq_genes = {'test_double_chromosome': {'low_freq_1': 'pan_gene_3',
                                                 'low_freq_2': 'pan_gene_6',
                                                 'low_freq_3': 'pan_gene_8',
                                                 'low_freq_4': 'pan_gene_11',
                                                 'low_freq_5': 'pan_gene_14'}}
    gff_path = 'TestSegmentingMockGffs/test_double_chromosome.gff'
    acc_genes = {'test_double_chromosome': {'acc_ID_1': 'pan_gene_1',
                                            'acc_ID_2': 'pan_gene_4',
                                            'acc_ID_4': 'pan_gene_12'}}
    complete_genomes = ['test_double_chromosome']  # complete: contigs wrap
    # Set up expected outputs
    core_gene_pairs = ['pan_gene_2--pan_gene_5', 'pan_gene_5--pan_gene_7',
                       'pan_gene_2--pan_gene_7', 'pan_gene_15--pan_gene_15']
    core_gene_pair_distance = {'pan_gene_2--pan_gene_5': 359,
                               'pan_gene_5--pan_gene_7': 269,
                               'pan_gene_2--pan_gene_7': 478,
                               'pan_gene_15--pan_gene_15': 1249}
    accessory_gene_content = {'pan_gene_2--pan_gene_5': ['pan_gene_4'],
                              'pan_gene_5--pan_gene_7': [],
                              'pan_gene_2--pan_gene_7': ['pan_gene_1'],
                              'pan_gene_15--pan_gene_15': ['pan_gene_12']}
    low_freq_gene_content = {'pan_gene_2--pan_gene_5': ['pan_gene_3'],
                             'pan_gene_5--pan_gene_7': ['pan_gene_6'],
                             'pan_gene_2--pan_gene_7': ['pan_gene_8'],
                             'pan_gene_15--pan_gene_15': ['pan_gene_11', 'pan_gene_14']}
    master_info = {
        'pan_gene_2--pan_gene_5--test_double_chromosome': ['test_double_chromosome', 'pan_gene_2', 'pan_gene_5',
                                                           359, 2, ['pan_gene_4'], ['pan_gene_3']],
        'pan_gene_5--pan_gene_7--test_double_chromosome': ['test_double_chromosome', 'pan_gene_5', 'pan_gene_7',
                                                           269, 1, [], ['pan_gene_6']],
        'pan_gene_2--pan_gene_7--test_double_chromosome': ['test_double_chromosome', 'pan_gene_2', 'pan_gene_7',
                                                           478, 2, ['pan_gene_1'], ['pan_gene_8']],
        'pan_gene_15--pan_gene_15--test_double_chromosome': ['test_double_chromosome', 'pan_gene_15', 'pan_gene_15',
                                                             1249, 3, ['pan_gene_12'], ['pan_gene_11', 'pan_gene_14']]
    }
    coreless_contigs = {}
    # Run function
    return_core_gene_pairs, return_core_gene_pair_distance, \
        return_accessory_gene_content, return_low_freq_gene_content, \
        return_master_info, return_coreless_contigs = gff_parser.segment_gff_content(
            gff_generator, core_genes, low_freq_genes, gff_path, acc_genes,
            complete_genomes)
    # Evaluate.
    # BUG FIX: list.sort() returns None, so assertEqual(a.sort(), b.sort())
    # compared None to None and could never fail; compare sorted copies.
    self.assertEqual(sorted(core_gene_pairs), sorted(return_core_gene_pairs))
    self.assertEqual(core_gene_pair_distance, return_core_gene_pair_distance)
    self.assertEqual(accessory_gene_content, return_accessory_gene_content)
    self.assertEqual(low_freq_gene_content, return_low_freq_gene_content)
    self.assertEqual(master_info, return_master_info)
    self.assertEqual(coreless_contigs, return_coreless_contigs)
def test_segmentation_of_fragmented_core_gene(self):
    """A core gene split into two fragments (Core_ID_1_1 / Core_ID_1_2,
    both mapped to pan_gene_2) is handled as one core gene on a complete
    single-chromosome genome."""
    # Mock GFF rows: [contig, source, feature, start, end, score, strand,
    # frame, gene ID]
    annotations = [
        ['gff_name_contig_1', '.', 'CDS', '90', '180', '.', '.', '.', 'acc_ID_1'],
        ['gff_name_contig_1', '.', 'CDS', '179', '250', '.', '.', '.', 'Core_ID_1_1'],
        ['gff_name_contig_1', '.', 'CDS', '251', '270', '.', '.', '.', 'Core_ID_1_2'],
        ['gff_name_contig_1', '.', 'CDS', '300', '425', '.', '.', '.', 'low_freq_1'],
        ['gff_name_contig_1', '.', 'CDS', '450', '500', '.', '.', '.', 'acc_ID_2'],
        ['gff_name_contig_1', '.', 'CDS', '610', '680', '.', '.', '.', 'Core_ID_2'],
        ['gff_name_contig_1', '.', 'CDS', '700', '850', '.', '.', '.', 'low_freq_2'],
        ['gff_name_contig_1', '.', 'CDS', '950', '1000', '.', '.', '.', 'Core_ID_3'],
        ['gff_name_contig_1', '.', 'CDS', '1100', '1250', '.', '.', '.', 'low_freq_3'],
    ]
    core_map = {'test_single_chromosome': {
        'Core_ID_1_1': 'pan_gene_2', 'Core_ID_1_2': 'pan_gene_2',
        'Core_ID_2': 'pan_gene_5', 'Core_ID_3': 'pan_gene_7'}}
    low_freq_map = {'test_single_chromosome': {
        'low_freq_1': 'pan_gene_3', 'low_freq_2': 'pan_gene_6',
        'low_freq_3': 'pan_gene_8'}}
    path = 'TestSegmentingMockGffs/test_single_chromosome.gff'
    acc_map = {'test_single_chromosome': {
        'acc_ID_1': 'pan_gene_1', 'acc_ID_2': 'pan_gene_4'}}
    completed = ['test_single_chromosome']
    # Expected outputs
    expected_pairs = ['pan_gene_2--pan_gene_5', 'pan_gene_5--pan_gene_7', 'pan_gene_2--pan_gene_7']
    expected_distances = {'pan_gene_2--pan_gene_5': 339,
                          'pan_gene_5--pan_gene_7': 269,
                          'pan_gene_2--pan_gene_7': 478}
    expected_acc = {'pan_gene_2--pan_gene_5': ['pan_gene_4'],
                    'pan_gene_5--pan_gene_7': [],
                    'pan_gene_2--pan_gene_7': ['pan_gene_1']}
    expected_low_freq = {'pan_gene_2--pan_gene_5': ['pan_gene_3'],
                         'pan_gene_5--pan_gene_7': ['pan_gene_6'],
                         'pan_gene_2--pan_gene_7': ['pan_gene_8']}
    expected_master = {
        'pan_gene_2--pan_gene_5--test_single_chromosome':
            ['test_single_chromosome', 'pan_gene_2', 'pan_gene_5', 339, 2, ['pan_gene_4'], ['pan_gene_3']],
        'pan_gene_5--pan_gene_7--test_single_chromosome':
            ['test_single_chromosome', 'pan_gene_5', 'pan_gene_7', 269, 1, [], ['pan_gene_6']],
        'pan_gene_2--pan_gene_7--test_single_chromosome':
            ['test_single_chromosome', 'pan_gene_2', 'pan_gene_7', 478, 2, ['pan_gene_1'], ['pan_gene_8']]}
    expected_coreless = {}
    # Run function under test
    (got_pairs, got_distances, got_acc, got_low_freq,
     got_master, got_coreless) = gff_parser.segment_gff_content(
        annotations, core_map, low_freq_map, path, acc_map, completed)
    # Evaluate
    self.assertEqual(expected_pairs, got_pairs)
    self.assertEqual(expected_distances, got_distances)
    self.assertEqual(expected_acc, got_acc)
    self.assertEqual(expected_low_freq, got_low_freq)
    self.assertEqual(expected_master, got_master)
    self.assertEqual(expected_coreless, got_coreless)
def test_segmentation_of_fragmented_core_gene_lone_contig(self):
    """A fragmented core gene (Core_ID_4_1 / Core_ID_4_2, both mapped to
    pan_gene_15) that is the only core gene on its contig of a complete
    genome yields a wrap-around self-pair."""
    # Set up input mock GFF rows:
    # [contig, source, feature, start, end, score, strand, frame, gene ID]
    gff_generator = [
        # Contig 1 annotations
        ['gff_name_contig_1', '.', 'CDS', '90', '180', '.', '.', '.', 'acc_ID_1'],
        ['gff_name_contig_1', '.', 'CDS', '179', '250', '.', '.', '.', 'Core_ID_1'],
        ['gff_name_contig_1', '.', 'CDS', '300', '425', '.', '.', '.', 'low_freq_1'],
        ['gff_name_contig_1', '.', 'CDS', '450', '500', '.', '.', '.', 'acc_ID_2'],
        ['gff_name_contig_1', '.', 'CDS', '610', '680', '.', '.', '.', 'Core_ID_2'],
        ['gff_name_contig_1', '.', 'CDS', '700', '850', '.', '.', '.', 'low_freq_2'],
        ['gff_name_contig_1', '.', 'CDS', '950', '1000', '.', '.', '.', 'Core_ID_3'],
        ['gff_name_contig_1', '.', 'CDS', '1100', '1250', '.', '.', '.', 'low_freq_3'],
        # Contig 2 annotations
        ['gff_name_contig_2', '.', 'CDS', '300', '425', '.', '.', '.', 'low_freq_4'],
        ['gff_name_contig_2', '.', 'CDS', '450', '500', '.', '.', '.', 'acc_ID_4'],
        ['gff_name_contig_2', '.', 'CDS', '700', '850', '.', '.', '.', 'low_freq_5'],
        ['gff_name_contig_2', '.', 'CDS', '950', '1000', '.', '.', '.', 'Core_ID_4_1'],
        # NOTE(review): start '10020' is greater than end '1030' — looks
        # like a typo for '1020'; the expected distance 1219 below appears
        # to depend on it, so it is left unchanged. Confirm against
        # segment_gff_content's coordinate handling.
        ['gff_name_contig_2', '.', 'CDS', '10020', '1030', '.', '.', '.', 'Core_ID_4_2']
    ]
    core_genes = {'test_double_chromosome': {'Core_ID_1': 'pan_gene_2',
                                             'Core_ID_2': 'pan_gene_5',
                                             'Core_ID_3': 'pan_gene_7',
                                             'Core_ID_4_1': 'pan_gene_15',
                                             'Core_ID_4_2': 'pan_gene_15'}}
    low_freq_genes = {'test_double_chromosome': {'low_freq_1': 'pan_gene_3',
                                                 'low_freq_2': 'pan_gene_6',
                                                 'low_freq_3': 'pan_gene_8',
                                                 'low_freq_4': 'pan_gene_11',
                                                 'low_freq_5': 'pan_gene_14'}}
    gff_path = 'TestSegmentingMockGffs/test_double_chromosome.gff'
    acc_genes = {'test_double_chromosome': {'acc_ID_1': 'pan_gene_1',
                                            'acc_ID_2': 'pan_gene_4',
                                            'acc_ID_4': 'pan_gene_12'}}
    complete_genomes = ['test_double_chromosome']
    # Set up expected outputs
    core_gene_pairs = ['pan_gene_2--pan_gene_5', 'pan_gene_5--pan_gene_7',
                       'pan_gene_2--pan_gene_7', 'pan_gene_15--pan_gene_15']
    core_gene_pair_distance = {'pan_gene_2--pan_gene_5': 359,
                               'pan_gene_5--pan_gene_7': 269,
                               'pan_gene_2--pan_gene_7': 478,
                               'pan_gene_15--pan_gene_15': 1219}
    accessory_gene_content = {'pan_gene_2--pan_gene_5': ['pan_gene_4'],
                              'pan_gene_5--pan_gene_7': [],
                              'pan_gene_2--pan_gene_7': ['pan_gene_1'],
                              'pan_gene_15--pan_gene_15': ['pan_gene_12']}
    low_freq_gene_content = {'pan_gene_2--pan_gene_5': ['pan_gene_3'],
                             'pan_gene_5--pan_gene_7': ['pan_gene_6'],
                             'pan_gene_2--pan_gene_7': ['pan_gene_8'],
                             'pan_gene_15--pan_gene_15': ['pan_gene_11', 'pan_gene_14']}
    master_info = {
        'pan_gene_2--pan_gene_5--test_double_chromosome': ['test_double_chromosome', 'pan_gene_2', 'pan_gene_5',
                                                           359, 2, ['pan_gene_4'], ['pan_gene_3']],
        'pan_gene_5--pan_gene_7--test_double_chromosome': ['test_double_chromosome', 'pan_gene_5', 'pan_gene_7',
                                                           269, 1, [], ['pan_gene_6']],
        'pan_gene_2--pan_gene_7--test_double_chromosome': ['test_double_chromosome', 'pan_gene_2', 'pan_gene_7',
                                                           478, 2, ['pan_gene_1'], ['pan_gene_8']],
        'pan_gene_15--pan_gene_15--test_double_chromosome': ['test_double_chromosome', 'pan_gene_15', 'pan_gene_15',
                                                             1219, 3, ['pan_gene_12'],
                                                             ['pan_gene_11', 'pan_gene_14']]
    }
    coreless_contigs = {}
    # Run function
    return_core_gene_pairs, return_core_gene_pair_distance, \
        return_accessory_gene_content, return_low_freq_gene_content, \
        return_master_info, return_coreless_contigs = gff_parser.segment_gff_content(
            gff_generator, core_genes, low_freq_genes, gff_path, acc_genes,
            complete_genomes)
    # Evaluate.
    # BUG FIX: list.sort() returns None, so assertEqual(a.sort(), b.sort())
    # compared None to None and could never fail; compare sorted copies.
    self.assertEqual(sorted(core_gene_pairs), sorted(return_core_gene_pairs))
    self.assertEqual(core_gene_pair_distance, return_core_gene_pair_distance)
    self.assertEqual(accessory_gene_content, return_accessory_gene_content)
    self.assertEqual(low_freq_gene_content, return_low_freq_gene_content)
    self.assertEqual(master_info, return_master_info)
    self.assertEqual(coreless_contigs, return_coreless_contigs)
def test_segmentation_of_fragmented_acc_gene(self):
    """An accessory gene split into two fragments (acc_ID_2_1 / acc_ID_2_2,
    both mapped to pan_gene_4) is counted once in the core-gene pair that
    contains it."""
    # Mock GFF rows: [contig, source, feature, start, end, score, strand,
    # frame, gene ID]
    annotations = [
        ['gff_name_contig_1', '.', 'CDS', '90', '180', '.', '.', '.', 'acc_ID_1'],
        ['gff_name_contig_1', '.', 'CDS', '179', '270', '.', '.', '.', 'Core_ID_1'],
        ['gff_name_contig_1', '.', 'CDS', '300', '425', '.', '.', '.', 'low_freq_1'],
        ['gff_name_contig_1', '.', 'CDS', '450', '460', '.', '.', '.', 'acc_ID_2_1'],
        ['gff_name_contig_1', '.', 'CDS', '470', '500', '.', '.', '.', 'acc_ID_2_2'],
        ['gff_name_contig_1', '.', 'CDS', '610', '680', '.', '.', '.', 'Core_ID_2'],
        ['gff_name_contig_1', '.', 'CDS', '700', '850', '.', '.', '.', 'low_freq_2'],
        ['gff_name_contig_1', '.', 'CDS', '950', '1000', '.', '.', '.', 'Core_ID_3'],
        ['gff_name_contig_1', '.', 'CDS', '1100', '1250', '.', '.', '.', 'low_freq_3'],
    ]
    core_map = {'test_single_chromosome': {
        'Core_ID_1': 'pan_gene_2', 'Core_ID_2': 'pan_gene_5',
        'Core_ID_3': 'pan_gene_7'}}
    low_freq_map = {'test_single_chromosome': {
        'low_freq_1': 'pan_gene_3', 'low_freq_2': 'pan_gene_6',
        'low_freq_3': 'pan_gene_8'}}
    path = 'TestSegmentingMockGffs/test_single_chromosome.gff'
    acc_map = {'test_single_chromosome': {
        'acc_ID_1': 'pan_gene_1',
        'acc_ID_2_1': 'pan_gene_4', 'acc_ID_2_2': 'pan_gene_4'}}
    completed = ['test_single_chromosome']
    # Expected outputs
    expected_pairs = ['pan_gene_2--pan_gene_5', 'pan_gene_5--pan_gene_7', 'pan_gene_2--pan_gene_7']
    expected_distances = {'pan_gene_2--pan_gene_5': 339,
                          'pan_gene_5--pan_gene_7': 269,
                          'pan_gene_2--pan_gene_7': 478}
    expected_acc = {'pan_gene_2--pan_gene_5': ['pan_gene_4'],
                    'pan_gene_5--pan_gene_7': [],
                    'pan_gene_2--pan_gene_7': ['pan_gene_1']}
    expected_low_freq = {'pan_gene_2--pan_gene_5': ['pan_gene_3'],
                         'pan_gene_5--pan_gene_7': ['pan_gene_6'],
                         'pan_gene_2--pan_gene_7': ['pan_gene_8']}
    expected_master = {
        'pan_gene_2--pan_gene_5--test_single_chromosome':
            ['test_single_chromosome', 'pan_gene_2', 'pan_gene_5', 339, 2, ['pan_gene_4'], ['pan_gene_3']],
        'pan_gene_5--pan_gene_7--test_single_chromosome':
            ['test_single_chromosome', 'pan_gene_5', 'pan_gene_7', 269, 1, [], ['pan_gene_6']],
        'pan_gene_2--pan_gene_7--test_single_chromosome':
            ['test_single_chromosome', 'pan_gene_2', 'pan_gene_7', 478, 2, ['pan_gene_1'], ['pan_gene_8']]}
    expected_coreless = {}
    # Run function under test
    (got_pairs, got_distances, got_acc, got_low_freq,
     got_master, got_coreless) = gff_parser.segment_gff_content(
        annotations, core_map, low_freq_map, path, acc_map, completed)
    # Evaluate
    self.assertEqual(expected_pairs, got_pairs)
    self.assertEqual(expected_distances, got_distances)
    self.assertEqual(expected_acc, got_acc)
    self.assertEqual(expected_low_freq, got_low_freq)
    self.assertEqual(expected_master, got_master)
    self.assertEqual(expected_coreless, got_coreless)
def test_segmentation_of_fragmented_acc_gene_between_first_n_last_gene(self):
    """A fragmented low-frequency gene (low_freq_3_1 / low_freq_3_2, both
    mapped to pan_gene_8) sitting between the last and first core gene of
    a complete genome is counted once in the wrap-around pair."""
    # Mock GFF rows: [contig, source, feature, start, end, score, strand,
    # frame, gene ID]
    annotations = [
        ['gff_name_contig_1', '.', 'CDS', '90', '180', '.', '.', '.', 'acc_ID_1'],
        ['gff_name_contig_1', '.', 'CDS', '179', '250', '.', '.', '.', 'Core_ID_1'],
        ['gff_name_contig_1', '.', 'CDS', '300', '425', '.', '.', '.', 'low_freq_1'],
        ['gff_name_contig_1', '.', 'CDS', '450', '500', '.', '.', '.', 'acc_ID_2'],
        ['gff_name_contig_1', '.', 'CDS', '610', '680', '.', '.', '.', 'Core_ID_2'],
        ['gff_name_contig_1', '.', 'CDS', '700', '850', '.', '.', '.', 'low_freq_2'],
        ['gff_name_contig_1', '.', 'CDS', '950', '1000', '.', '.', '.', 'Core_ID_3'],
        ['gff_name_contig_1', '.', 'CDS', '1100', '1150', '.', '.', '.', 'low_freq_3_1'],
        ['gff_name_contig_1', '.', 'CDS', '1170', '1250', '.', '.', '.', 'low_freq_3_2'],
    ]
    core_map = {'test_single_chromosome': {
        'Core_ID_1': 'pan_gene_2', 'Core_ID_2': 'pan_gene_5',
        'Core_ID_3': 'pan_gene_7'}}
    low_freq_map = {'test_single_chromosome': {
        'low_freq_1': 'pan_gene_3', 'low_freq_2': 'pan_gene_6',
        'low_freq_3_1': 'pan_gene_8', 'low_freq_3_2': 'pan_gene_8'}}
    path = 'TestSegmentingMockGffs/test_single_chromosome.gff'
    acc_map = {'test_single_chromosome': {
        'acc_ID_1': 'pan_gene_1', 'acc_ID_2': 'pan_gene_4'}}
    completed = ['test_single_chromosome']
    # Expected outputs
    expected_pairs = ['pan_gene_2--pan_gene_5', 'pan_gene_5--pan_gene_7', 'pan_gene_2--pan_gene_7']
    expected_distances = {'pan_gene_2--pan_gene_5': 359,
                          'pan_gene_5--pan_gene_7': 269,
                          'pan_gene_2--pan_gene_7': 478}
    expected_acc = {'pan_gene_2--pan_gene_5': ['pan_gene_4'],
                    'pan_gene_5--pan_gene_7': [],
                    'pan_gene_2--pan_gene_7': ['pan_gene_1']}
    expected_low_freq = {'pan_gene_2--pan_gene_5': ['pan_gene_3'],
                         'pan_gene_5--pan_gene_7': ['pan_gene_6'],
                         'pan_gene_2--pan_gene_7': ['pan_gene_8']}
    expected_master = {
        'pan_gene_2--pan_gene_5--test_single_chromosome':
            ['test_single_chromosome', 'pan_gene_2', 'pan_gene_5', 359, 2, ['pan_gene_4'], ['pan_gene_3']],
        'pan_gene_5--pan_gene_7--test_single_chromosome':
            ['test_single_chromosome', 'pan_gene_5', 'pan_gene_7', 269, 1, [], ['pan_gene_6']],
        'pan_gene_2--pan_gene_7--test_single_chromosome':
            ['test_single_chromosome', 'pan_gene_2', 'pan_gene_7', 478, 2, ['pan_gene_1'], ['pan_gene_8']]}
    expected_coreless = {}
    # Run function under test
    (got_pairs, got_distances, got_acc, got_low_freq,
     got_master, got_coreless) = gff_parser.segment_gff_content(
        annotations, core_map, low_freq_map, path, acc_map, completed)
    # Evaluate
    self.assertEqual(expected_pairs, got_pairs)
    self.assertEqual(expected_distances, got_distances)
    self.assertEqual(expected_acc, got_acc)
    self.assertEqual(expected_low_freq, got_low_freq)
    self.assertEqual(expected_master, got_master)
    self.assertEqual(expected_coreless, got_coreless)
def test_segmentation_of_fragmented_acc_gene_on_coreless_contig(self):
    """A fragmented accessory gene (acc_ID_4_1 / acc_ID_4_2, both mapped to
    pan_gene_12) on a coreless contig of a draft genome is recorded once in
    coreless_contigs."""
    # Set up input mock GFF rows:
    # [contig, source, feature, start, end, score, strand, frame, gene ID]
    gff_generator = [
        # Contig 1 annotations
        ['gff_name_contig_1', '.', 'CDS', '90', '180', '.', '.', '.', 'acc_ID_1'],
        ['gff_name_contig_1', '.', 'CDS', '179', '250', '.', '.', '.', 'Core_ID_1'],
        ['gff_name_contig_1', '.', 'CDS', '300', '425', '.', '.', '.', 'low_freq_1'],
        ['gff_name_contig_1', '.', 'CDS', '450', '500', '.', '.', '.', 'acc_ID_2'],
        ['gff_name_contig_1', '.', 'CDS', '610', '680', '.', '.', '.', 'Core_ID_2'],
        ['gff_name_contig_1', '.', 'CDS', '700', '850', '.', '.', '.', 'low_freq_2'],
        ['gff_name_contig_1', '.', 'CDS', '950', '1000', '.', '.', '.', 'Core_ID_3'],
        ['gff_name_contig_1', '.', 'CDS', '1100', '1250', '.', '.', '.', 'low_freq_3'],
        # Contig 2 annotations (coreless, fragmented accessory gene)
        ['gff_name_contig_2', '.', 'CDS', '300', '425', '.', '.', '.', 'low_freq_4'],
        ['gff_name_contig_2', '.', 'CDS', '450', '500', '.', '.', '.', 'acc_ID_4_1'],
        ['gff_name_contig_2', '.', 'CDS', '501', '503', '.', '.', '.', 'acc_ID_4_2'],
        ['gff_name_contig_2', '.', 'CDS', '700', '850', '.', '.', '.', 'low_freq_5'],
        # Contig 3 annotations
        ['gff_name_contig_3', '.', 'CDS', '179', '250', '.', '.', '.', 'Core_ID_4'],
        ['gff_name_contig_3', '.', 'CDS', '610', '680', '.', '.', '.', 'Core_ID_5']
    ]
    core_genes = {'test_triple_chromosome': {'Core_ID_1': 'pan_gene_2',
                                             'Core_ID_2': 'pan_gene_5',
                                             'Core_ID_3': 'pan_gene_7',
                                             'Core_ID_4': 'pan_gene_10',
                                             'Core_ID_5': 'pan_gene_13'}}
    low_freq_genes = {'test_triple_chromosome': {'low_freq_1': 'pan_gene_3',
                                                 'low_freq_2': 'pan_gene_6',
                                                 'low_freq_3': 'pan_gene_8',
                                                 'low_freq_4': 'pan_gene_11',
                                                 'low_freq_5': 'pan_gene_14'}}
    gff_path = 'TestSegmentingMockGffs/test_triple_chromosome.gff'
    acc_genes = {'test_triple_chromosome': {'acc_ID_1': 'pan_gene_1',
                                            'acc_ID_2': 'pan_gene_4',
                                            'acc_ID_4_1': 'pan_gene_12',
                                            'acc_ID_4_2': 'pan_gene_12'}}
    complete_genomes = []  # draft genome: contig ends are sequence breaks
    # Construct expected results
    core_gene_pairs = ['pan_gene_2--pan_gene_5', 'pan_gene_5--pan_gene_7',
                       'Sequence_break--pan_gene_2', 'pan_gene_7--Sequence_break',
                       'Sequence_break--pan_gene_10', 'pan_gene_10--pan_gene_13', 'pan_gene_13--Sequence_break']
    core_gene_pair_distance = {'pan_gene_2--pan_gene_5': 359,
                               'pan_gene_5--pan_gene_7': 269,
                               'Sequence_break--pan_gene_2': 178,
                               'pan_gene_7--Sequence_break': 300,
                               'Sequence_break--pan_gene_10': 178,
                               'pan_gene_10--pan_gene_13': 359,
                               'pan_gene_13--Sequence_break': 620}
    accessory_gene_content = {'pan_gene_2--pan_gene_5': ['pan_gene_4'],
                              'pan_gene_5--pan_gene_7': [],
                              'Sequence_break--pan_gene_2': ['pan_gene_1'],
                              'pan_gene_7--Sequence_break': [],
                              'Sequence_break--pan_gene_10': [],
                              'pan_gene_10--pan_gene_13': [],
                              'pan_gene_13--Sequence_break': []}
    low_freq_gene_content = {'pan_gene_2--pan_gene_5': ['pan_gene_3'],
                             'pan_gene_5--pan_gene_7': ['pan_gene_6'],
                             'Sequence_break--pan_gene_2': [],
                             'pan_gene_7--Sequence_break': ['pan_gene_8'],
                             'Sequence_break--pan_gene_10': [],
                             'pan_gene_10--pan_gene_13': [],
                             'pan_gene_13--Sequence_break': []}
    master_info = {
        'pan_gene_2--pan_gene_5--test_triple_chromosome': ['test_triple_chromosome', 'pan_gene_2', 'pan_gene_5',
                                                           359, 2, ['pan_gene_4'], ['pan_gene_3']],
        'pan_gene_5--pan_gene_7--test_triple_chromosome': ['test_triple_chromosome', 'pan_gene_5', 'pan_gene_7',
                                                           269, 1, [], ['pan_gene_6']],
        'Sequence_break--pan_gene_2--test_triple_chromosome': ['test_triple_chromosome', 'Sequence_break',
                                                               'pan_gene_2', 178, 1, ['pan_gene_1'], []],
        'pan_gene_7--Sequence_break--test_triple_chromosome': ['test_triple_chromosome', 'pan_gene_7',
                                                               'Sequence_break', 300, 1, [], ['pan_gene_8']],
        'Sequence_break--pan_gene_10--test_triple_chromosome': ['test_triple_chromosome', 'Sequence_break', 'pan_gene_10', 178, 0, [], []],
        'pan_gene_10--pan_gene_13--test_triple_chromosome': ['test_triple_chromosome', 'pan_gene_10', 'pan_gene_13', 359, 0, [], []],
        'pan_gene_13--Sequence_break--test_triple_chromosome': ['test_triple_chromosome', 'pan_gene_13', 'Sequence_break', 620, 0, [], []]
    }
    coreless_contigs = {
        'test_triple_chromosome--gff_name_contig_2': [['pan_gene_12'], ['pan_gene_11', 'pan_gene_14']]}
    # Run function
    return_core_gene_pairs, return_core_gene_pair_distance, \
        return_accessory_gene_content, return_low_freq_gene_content, \
        return_master_info, return_coreless_contigs = gff_parser.segment_gff_content(
            gff_generator, core_genes, low_freq_genes, gff_path, acc_genes,
            complete_genomes)
    # Evaluate.
    # BUG FIX: list.sort() returns None, so assertEqual(a.sort(), b.sort())
    # compared None to None and could never fail; compare sorted copies.
    self.assertEqual(sorted(core_gene_pairs), sorted(return_core_gene_pairs))
    self.assertEqual(core_gene_pair_distance, return_core_gene_pair_distance)
    self.assertEqual(accessory_gene_content, return_accessory_gene_content)
    self.assertEqual(low_freq_gene_content, return_low_freq_gene_content)
    self.assertEqual(master_info, return_master_info)
    self.assertEqual(coreless_contigs, return_coreless_contigs)
def test_single_fragmented_gene_on_either_side_of_core_gene(self):
    """Fragments of one accessory gene (acc_ID_2_1 / acc_ID_2_2, both
    mapped to pan_gene_4) landing on either side of a core gene cause the
    gene to be counted in both neighbouring core-gene pairs."""
    # Mock GFF rows: [contig, source, feature, start, end, score, strand,
    # frame, gene ID]
    annotations = [
        ['gff_name_contig_1', '.', 'CDS', '90', '180', '.', '.', '.', 'acc_ID_1'],
        ['gff_name_contig_1', '.', 'CDS', '179', '270', '.', '.', '.', 'Core_ID_1'],
        ['gff_name_contig_1', '.', 'CDS', '300', '425', '.', '.', '.', 'low_freq_1'],
        ['gff_name_contig_1', '.', 'CDS', '450', '460', '.', '.', '.', 'acc_ID_2_1'],
        ['gff_name_contig_1', '.', 'CDS', '610', '680', '.', '.', '.', 'Core_ID_2'],
        ['gff_name_contig_1', '.', 'CDS', '685', '690', '.', '.', '.', 'acc_ID_2_2'],
        ['gff_name_contig_1', '.', 'CDS', '700', '850', '.', '.', '.', 'low_freq_2'],
        ['gff_name_contig_1', '.', 'CDS', '950', '1000', '.', '.', '.', 'Core_ID_3'],
        ['gff_name_contig_1', '.', 'CDS', '1100', '1250', '.', '.', '.', 'low_freq_3'],
    ]
    core_map = {'test_single_chromosome': {
        'Core_ID_1': 'pan_gene_2', 'Core_ID_2': 'pan_gene_5',
        'Core_ID_3': 'pan_gene_7'}}
    low_freq_map = {'test_single_chromosome': {
        'low_freq_1': 'pan_gene_3', 'low_freq_2': 'pan_gene_6',
        'low_freq_3': 'pan_gene_8'}}
    path = 'TestSegmentingMockGffs/test_single_chromosome.gff'
    acc_map = {'test_single_chromosome': {
        'acc_ID_1': 'pan_gene_1',
        'acc_ID_2_1': 'pan_gene_4', 'acc_ID_2_2': 'pan_gene_4'}}
    completed = ['test_single_chromosome']
    # Expected outputs: pan_gene_4 appears in both flanking pairs.
    expected_pairs = ['pan_gene_2--pan_gene_5', 'pan_gene_5--pan_gene_7', 'pan_gene_2--pan_gene_7']
    expected_distances = {'pan_gene_2--pan_gene_5': 339,
                          'pan_gene_5--pan_gene_7': 269,
                          'pan_gene_2--pan_gene_7': 478}
    expected_acc = {'pan_gene_2--pan_gene_5': ['pan_gene_4'],
                    'pan_gene_5--pan_gene_7': ['pan_gene_4'],
                    'pan_gene_2--pan_gene_7': ['pan_gene_1']}
    expected_low_freq = {'pan_gene_2--pan_gene_5': ['pan_gene_3'],
                         'pan_gene_5--pan_gene_7': ['pan_gene_6'],
                         'pan_gene_2--pan_gene_7': ['pan_gene_8']}
    expected_master = {
        'pan_gene_2--pan_gene_5--test_single_chromosome':
            ['test_single_chromosome', 'pan_gene_2', 'pan_gene_5', 339, 2, ['pan_gene_4'], ['pan_gene_3']],
        'pan_gene_5--pan_gene_7--test_single_chromosome':
            ['test_single_chromosome', 'pan_gene_5', 'pan_gene_7', 269, 2, ['pan_gene_4'], ['pan_gene_6']],
        'pan_gene_2--pan_gene_7--test_single_chromosome':
            ['test_single_chromosome', 'pan_gene_2', 'pan_gene_7', 478, 2, ['pan_gene_1'], ['pan_gene_8']]}
    expected_coreless = {}
    # Run function under test
    (got_pairs, got_distances, got_acc, got_low_freq,
     got_master, got_coreless) = gff_parser.segment_gff_content(
        annotations, core_map, low_freq_map, path, acc_map, completed)
    # Evaluate
    self.assertEqual(expected_pairs, got_pairs)
    self.assertEqual(expected_distances, got_distances)
    self.assertEqual(expected_acc, got_acc)
    self.assertEqual(expected_low_freq, got_low_freq)
    self.assertEqual(expected_master, got_master)
    self.assertEqual(expected_coreless, got_coreless)
def test_something(self):  # TODO - What other weird and wonderful examples can we come up with?
    """Placeholder for additional edge-case tests (none implemented yet)."""
    pass
class TestMergingDicts(unittest.TestCase):
    """Tests for merging lists/dicts of observations into running dictionaries."""

    # --- merge_dicts_counts: record one count per key of the second argument ---
    def test_merge_dicts_counts_list_empty(self):
        """Merging a list into an empty dict records each key once."""
        merged = merge_dicts.merge_dicts_counts({}, ['x', 'y', 'z'])
        self.assertEqual(merged, {'x': 1, 'y': 1, 'z': 1})

    def test_merge_dicts_counts_dict_empty(self):
        """Merging a dict into an empty dict counts keys once, ignoring their values."""
        merged = merge_dicts.merge_dicts_counts({}, {'x': 2, 'y': 2, 'z': 2})
        self.assertEqual(merged, {'x': 1, 'y': 1, 'z': 1})

    def test_merge_dicts_counts_list_adding(self):
        """Keys already present are incremented when seen again in a list."""
        merged = merge_dicts.merge_dicts_counts({'x': 1, 'y': 1, 'z': 1}, ['x', 'y', 'z'])
        self.assertEqual(merged, {'x': 2, 'y': 2, 'z': 2})

    def test_merge_dicts_counts_dict_adding(self):
        """Keys already present are incremented when seen again in a dict."""
        merged = merge_dicts.merge_dicts_counts({'x': 1, 'y': 1, 'z': 1},
                                                {'x': 1, 'y': 1, 'z': 1})
        self.assertEqual(merged, {'x': 2, 'y': 2, 'z': 2})

    def test_merge_dicts_counts_dict_mix(self):
        """Only keys present in the merged-in dict are incremented; others keep their count."""
        merged = merge_dicts.merge_dicts_counts({'x': 1, 'y': 1, 'z': 1}, {'x': 1, 'y': 1})
        self.assertEqual(merged, {'x': 2, 'y': 2, 'z': 1})

    def test_merge_dicts_counts_list_mix(self):
        """Unseen keys in the list start at one; known keys are incremented."""
        merged = merge_dicts.merge_dicts_counts({'x': 1, 'y': 1}, ['x', 'y', 'z'])
        self.assertEqual(merged, {'x': 2, 'y': 2, 'z': 1})

    # --- merge_dicts_lists: extend per-key lists with the merged-in values ---
    def test_merge_dicts_lists_empty(self):
        """Merging into an empty dict yields the merge dict's contents."""
        to_merge = {'x': ['test_3'], 'y': ['test_2'], 'z': ['test_1']}
        merged = merge_dicts.merge_dicts_lists({}, to_merge)
        self.assertEqual({'x': ['test_3'], 'y': ['test_2'], 'z': ['test_1']}, merged)

    def test_merge_dicts_lists_adding(self):
        """Existing per-key lists are extended by the merged-in lists."""
        start = {'x': ['init_3'], 'y': ['init_2'], 'z': ['init_1']}
        to_merge = {'x': ['test_3'], 'y': ['test_2'], 'z': ['test_1']}
        merged = merge_dicts.merge_dicts_lists(start, to_merge)
        self.assertEqual({'x': ['init_3', 'test_3'],
                          'y': ['init_2', 'test_2'],
                          'z': ['init_1', 'test_1']}, merged)

    def test_merge_dicts_lists_mix(self):
        """Missing keys are created; existing ones are extended."""
        start = {'x': ['init_3'], 'y': ['init_2']}
        to_merge = {'x': ['test_3'], 'y': ['test_2'], 'z': ['test_1']}
        merged = merge_dicts.merge_dicts_lists(start, to_merge)
        self.assertEqual({'x': ['init_3', 'test_3'],
                          'y': ['init_2', 'test_2'],
                          'z': ['test_1']}, merged)
class TestCoreGraphConstruction(unittest.TestCase):
    """
    Test the construction of a network made from core gene pairs and their
    number of connections.
    """

    @staticmethod
    def _edge_weights(graph):
        """Return the 'weight' attribute of every edge, in graph edge order."""
        return [graph.get_edge_data(u, v)['weight'] for u, v in graph.edges]

    def test_core_gene_graph_construction_circle_case(self):
        """Six genes in a closed circle: every node has degree two, every weight is ten."""
        pair_counts = {'pan_cluster_1--pan_cluster_2': 10,
                       'pan_cluster_2--pan_cluster_3': 10,
                       'pan_cluster_3--pan_cluster_4': 10,
                       'pan_cluster_4--pan_cluster_5': 10,
                       'pan_cluster_5--pan_cluster_6': 10,
                       'pan_cluster_1--pan_cluster_6': 10}
        graph = consesus_core_genome.construct_core_graph(pair_counts)
        self.assertEqual([('pan_cluster_1', 'pan_cluster_2'), ('pan_cluster_1', 'pan_cluster_6'),
                          ('pan_cluster_2', 'pan_cluster_3'), ('pan_cluster_3', 'pan_cluster_4'),
                          ('pan_cluster_4', 'pan_cluster_5'), ('pan_cluster_5', 'pan_cluster_6')],
                         list(graph.edges))
        self.assertEqual([('pan_cluster_1', 2), ('pan_cluster_2', 2), ('pan_cluster_3', 2),
                          ('pan_cluster_4', 2), ('pan_cluster_5', 2), ('pan_cluster_6', 2)],
                         list(graph.degree))
        self.assertEqual([10] * 6, self._edge_weights(graph))

    def test_core_gene_graph_construction_circle_case_with_single_break(self):
        """A single sequence break lowers two pair counts; break pairs are not expected as nodes."""
        pair_counts = {'pan_cluster_1--pan_cluster_2': 9,
                       'pan_cluster_1--Sequence_break': 1,
                       'Sequence_break--pan_cluster_2': 1,
                       'pan_cluster_2--pan_cluster_3': 9,
                       'pan_cluster_3--pan_cluster_4': 10,
                       'pan_cluster_4--pan_cluster_5': 10,
                       'pan_cluster_5--pan_cluster_6': 10,
                       'pan_cluster_1--pan_cluster_6': 10}
        graph = consesus_core_genome.construct_core_graph(pair_counts)
        self.assertEqual([('pan_cluster_1', 'pan_cluster_2'), ('pan_cluster_1', 'pan_cluster_6'),
                          ('pan_cluster_2', 'pan_cluster_3'), ('pan_cluster_3', 'pan_cluster_4'),
                          ('pan_cluster_4', 'pan_cluster_5'), ('pan_cluster_5', 'pan_cluster_6')],
                         list(graph.edges))
        self.assertEqual([('pan_cluster_1', 2), ('pan_cluster_2', 2), ('pan_cluster_3', 2),
                          ('pan_cluster_4', 2), ('pan_cluster_5', 2), ('pan_cluster_6', 2)],
                         list(graph.degree))
        self.assertEqual([9, 10, 9, 10, 10, 10], self._edge_weights(graph))

    def test_core_gene_graph_construction_three_degree_case(self):
        """An extra pan_cluster_1--pan_cluster_4 pair gives those two nodes degree three."""
        pair_counts = {'pan_cluster_1--pan_cluster_2': 10,
                       'pan_cluster_2--pan_cluster_3': 10,
                       'pan_cluster_3--pan_cluster_4': 8,
                       'pan_cluster_4--pan_cluster_5': 10,
                       'pan_cluster_5--pan_cluster_6': 10,
                       'pan_cluster_1--pan_cluster_6': 8,
                       'pan_cluster_1--pan_cluster_4': 2}
        graph = consesus_core_genome.construct_core_graph(pair_counts)
        self.assertEqual([('pan_cluster_1', 'pan_cluster_2'), ('pan_cluster_1', 'pan_cluster_6'),
                          ('pan_cluster_1', 'pan_cluster_4'), ('pan_cluster_2', 'pan_cluster_3'),
                          ('pan_cluster_3', 'pan_cluster_4'), ('pan_cluster_4', 'pan_cluster_5'),
                          ('pan_cluster_5', 'pan_cluster_6')],
                         list(graph.edges))
        self.assertEqual([('pan_cluster_1', 3), ('pan_cluster_2', 2), ('pan_cluster_3', 2),
                          ('pan_cluster_4', 3), ('pan_cluster_5', 2), ('pan_cluster_6', 2)],
                         list(graph.degree))
        self.assertEqual([10, 8, 2, 10, 8, 10, 10], self._edge_weights(graph))

    def test_core_gene_graph_construction_three_degree_n_sequence_breaks_case(self):
        """Same as the three-degree case, with sequence-break pairs added to the input."""
        pair_counts = {'pan_cluster_1--pan_cluster_2': 10,
                       'pan_cluster_2--pan_cluster_3': 10,
                       'pan_cluster_3--pan_cluster_4': 8,
                       'pan_cluster_4--pan_cluster_5': 10,
                       'pan_cluster_5--pan_cluster_6': 10,
                       'pan_cluster_1--pan_cluster_6': 8,
                       'pan_cluster_1--pan_cluster_4': 2,
                       'pan_cluster_3--Sequence_break': 2,
                       'pan_cluster_6--Sequence_break': 2}
        graph = consesus_core_genome.construct_core_graph(pair_counts)
        self.assertEqual([('pan_cluster_1', 'pan_cluster_2'), ('pan_cluster_1', 'pan_cluster_6'),
                          ('pan_cluster_1', 'pan_cluster_4'), ('pan_cluster_2', 'pan_cluster_3'),
                          ('pan_cluster_3', 'pan_cluster_4'), ('pan_cluster_4', 'pan_cluster_5'),
                          ('pan_cluster_5', 'pan_cluster_6')],
                         list(graph.edges))
        self.assertEqual([('pan_cluster_1', 3), ('pan_cluster_2', 2), ('pan_cluster_3', 2),
                          ('pan_cluster_4', 3), ('pan_cluster_5', 2), ('pan_cluster_6', 2)],
                         list(graph.degree))
        self.assertEqual([10, 8, 2, 10, 8, 10, 10], self._edge_weights(graph))
class TestGeneCoOccurrence(unittest.TestCase):
    """
    Test function that identifies the number of genomes in which two core genes co-occur.
    """

    def test_count_gene_co_occurrence(self):
        """Genes A and B both present in all ten genomes -> co-occurrence of ten."""
        genomes = ['Silas_the_Salmonella', 'Christina_the_Streptococcus', 'Ajwa_the_Shigella',
                   'Ajwa_the_Legionella', 'Cari_the_Listeria', 'Aman_the_Streptococcus',
                   'Zion_the_Streptococcus', 'Dina_the_Shigella', 'Silas_the_Legionella',
                   'Lilly_the_Shigella']
        # Each genome carries one locus tag for gene A and one for gene B ...
        core_gene_dict = {name: {f'{name}_tag-{i}-1': 'A', f'{name}_tag-{i}-2': 'B'}
                          for i, name in enumerate(genomes, start=1)}
        # ... except the first, whose gene-B tag is split into two entries (.1/.2).
        core_gene_dict['Silas_the_Salmonella'] = {'Silas_the_Salmonella_tag-1-1': 'A',
                                                  'Silas_the_Salmonella_tag-1-2.1': 'B',
                                                  'Silas_the_Salmonella_tag-1-2.2': 'B'}
        co_occurrence, occurrences = consesus_core_genome.count_gene_co_occurrence(
            core_gene_dict, ['B', 'A'])
        self.assertEqual(10, co_occurrence)
        self.assertEqual(10, occurrences['A'])
        self.assertEqual(10, occurrences['B'])

    def test_count_gene_co_occurrence_no_occurence(self):
        """Every genome carries only one of the two genes -> co-occurrence of zero."""
        core_gene_dict = {
            'Silas_the_Salmonella': {'Silas_the_Salmonella_tag-1-2.1': 'B',
                                     'Silas_the_Salmonella_tag-1-2.2': 'B'},
            'Christina_the_Streptococcus': {'Christina_the_Streptococcus_tag-2-1': 'A'},
            'Ajwa_the_Shigella': {'Ajwa_the_Shigella_tag-3-2': 'B'},
            'Ajwa_the_Legionella': {'Ajwa_the_Legionella_tag-4-2': 'B'},
            'Cari_the_Listeria': {'Cari_the_Listeria_tag-5-1': 'A'},
            'Aman_the_Streptococcus': {'Aman_the_Streptococcus_tag-6-2': 'B'},
            'Zion_the_Streptococcus': {'Zion_the_Streptococcus_tag-7-1': 'A'},
            'Dina_the_Shigella': {'Dina_the_Shigella_tag-8-1': 'A'},
            'Silas_the_Legionella': {'Silas_the_Legionella_tag-9-2': 'B'},
            'Lilly_the_Shigella': {'Lilly_the_Shigella_tag-10-1': 'A'}}
        co_occurrence, occurrences = consesus_core_genome.count_gene_co_occurrence(
            core_gene_dict, ['B', 'A'])
        self.assertEqual(0, co_occurrence)
        # Each gene still occurs individually in five genomes.
        self.assertEqual(5, occurrences['A'])
        self.assertEqual(5, occurrences['B'])
class TestSegmentationIdentification(unittest.TestCase):
    """
    Test the function that identifies core gene segments from a pan-genome.

    Each test builds a core-gene graph from a dict mapping 'geneA--geneB' to
    the number of times that pair was observed, then asserts on the segments
    returned by identify_segments. Expected segments are keyed by their two
    end-point core genes joined by '--', with the value being the ordered list
    of core genes along the segment.
    """

    def test_double_edge_segment_identification_all_2_degree_input(self):
        """A closed circle where every node has degree two yields no segments (None)."""
        core_neighbour_pairs = {'pan_cluster_1--pan_cluster_2': 10,
                                'pan_cluster_2--pan_cluster_3': 10,
                                'pan_cluster_3--pan_cluster_4': 10,
                                'pan_cluster_4--pan_cluster_5': 10,
                                'pan_cluster_5--pan_cluster_6': 10,
                                'pan_cluster_6--pan_cluster_1': 10}
        core_graph = consesus_core_genome.construct_core_graph(core_neighbour_pairs)
        num_components = number_connected_components(core_graph)
        return_1 = consesus_core_genome.identify_segments(core_graph, 10, {}, num_components)
        # No multi-degree nodes to anchor segments, so nothing is returned.
        self.assertEqual(None, return_1)

    def test_double_edge_segment_identification_two_segments(self):
        """Two low-count cross links split the circle into two expected segments."""
        expected_segments = {'pan_cluster_1--pan_cluster_5': ['pan_cluster_1', 'pan_cluster_6', 'pan_cluster_5'],
                             'pan_cluster_2--pan_cluster_4': ['pan_cluster_2', 'pan_cluster_3', 'pan_cluster_4']}
        core_neighbour_pairs = {'pan_cluster_1--pan_cluster_2': 9,
                                'pan_cluster_1--pan_cluster_4': 1,
                                'pan_cluster_2--pan_cluster_3': 10,
                                'pan_cluster_3--pan_cluster_4': 10,
                                'pan_cluster_2--pan_cluster_5': 1,
                                'pan_cluster_4--pan_cluster_5': 9,
                                'pan_cluster_5--pan_cluster_6': 10,
                                'pan_cluster_6--pan_cluster_1': 10}
        # All ten genomes carry the four multi-degree clusters in the same order.
        core_gene_dict = {'genome_1': {'tag_1': 'pan_cluster_1', 'tag_2': 'pan_cluster_4', 'tag_3': 'pan_cluster_2', 'tag_4': 'pan_cluster_5'},
                          'genome_2': {'tag_1': 'pan_cluster_1', 'tag_2': 'pan_cluster_4', 'tag_3': 'pan_cluster_2', 'tag_4': 'pan_cluster_5'},
                          'genome_3': {'tag_1': 'pan_cluster_1', 'tag_2': 'pan_cluster_4', 'tag_3': 'pan_cluster_2', 'tag_4': 'pan_cluster_5'},
                          'genome_4': {'tag_1': 'pan_cluster_1', 'tag_2': 'pan_cluster_4', 'tag_3': 'pan_cluster_2', 'tag_4': 'pan_cluster_5'},
                          'genome_5': {'tag_1': 'pan_cluster_1', 'tag_2': 'pan_cluster_4', 'tag_3': 'pan_cluster_2', 'tag_4': 'pan_cluster_5'},
                          'genome_6': {'tag_1': 'pan_cluster_1', 'tag_2': 'pan_cluster_4', 'tag_3': 'pan_cluster_2', 'tag_4': 'pan_cluster_5'},
                          'genome_7': {'tag_1': 'pan_cluster_1', 'tag_2': 'pan_cluster_4', 'tag_3': 'pan_cluster_2', 'tag_4': 'pan_cluster_5'},
                          'genome_8': {'tag_1': 'pan_cluster_1', 'tag_2': 'pan_cluster_4', 'tag_3': 'pan_cluster_2', 'tag_4': 'pan_cluster_5'},
                          'genome_9': {'tag_1': 'pan_cluster_1', 'tag_2': 'pan_cluster_4', 'tag_3': 'pan_cluster_2', 'tag_4': 'pan_cluster_5'},
                          'genome_10': {'tag_1': 'pan_cluster_1', 'tag_2': 'pan_cluster_4', 'tag_3': 'pan_cluster_2', 'tag_4': 'pan_cluster_5'},}
        core_graph = consesus_core_genome.construct_core_graph(core_neighbour_pairs)
        num_components = number_connected_components(core_graph)
        double_edge_segements = consesus_core_genome.identify_segments(core_graph, 10, core_gene_dict, num_components)
        self.assertEqual(expected_segments, double_edge_segements)

    def test_double_edge_segment_identification_four_segments(self):
        """A larger circle with two extra cross links produces four segments."""
        expected_segments = {'pan_cluster_1--pan_cluster_3': ['pan_cluster_1', 'pan_cluster_2', 'pan_cluster_3'],
                             'pan_cluster_1--pan_cluster_9': ['pan_cluster_1', 'pan_cluster_10', 'pan_cluster_9'],
                             'pan_cluster_3--pan_cluster_6': ['pan_cluster_6', 'pan_cluster_5', 'pan_cluster_4', 'pan_cluster_3'],
                             'pan_cluster_6--pan_cluster_9': ['pan_cluster_6', 'pan_cluster_7', 'pan_cluster_8', 'pan_cluster_9']}
        core_neighbour_pairs = {'pan_cluster_1--pan_cluster_2': 9,
                                'pan_cluster_1--pan_cluster_6': 1,
                                'pan_cluster_2--pan_cluster_3': 10,
                                'pan_cluster_3--pan_cluster_4': 9,
                                'pan_cluster_3--pan_cluster_9': 1,
                                'pan_cluster_4--pan_cluster_5': 10,
                                'pan_cluster_5--pan_cluster_6': 10,
                                'pan_cluster_6--pan_cluster_7': 10,
                                'pan_cluster_7--pan_cluster_8': 10,
                                'pan_cluster_8--pan_cluster_9': 9,
                                'pan_cluster_9--pan_cluster_10': 10,
                                'pan_cluster_1--pan_cluster_10': 10}
        core_graph = consesus_core_genome.construct_core_graph(core_neighbour_pairs)
        num_components = number_connected_components(core_graph)
        double_edge_segements = consesus_core_genome.identify_segments(core_graph, 10, {}, num_components)
        self.assertEqual(expected_segments, double_edge_segements)

    def test_double_edge_segment_identification_segments_node_w_four_degrees(self):
        """pan_cluster_2 has four connections; three segments are still resolved."""
        # expected_segments = {'pan_cluster_4--pan_cluster_6': ['pan_cluster_4', 'pan_cluster_5', 'pan_cluster_6']}
        expected_segments = {'pan_cluster_2--pan_cluster_4': ['pan_cluster_2',
                                                              'pan_cluster_3',
                                                              'pan_cluster_4'],
                             'pan_cluster_2--pan_cluster_6': ['pan_cluster_2',
                                                              'pan_cluster_1',
                                                              'pan_cluster_6'],
                             'pan_cluster_4--pan_cluster_6': ['pan_cluster_4',
                                                              'pan_cluster_5',
                                                              'pan_cluster_6']}
        core_neighbour_pairs = {'pan_cluster_1--pan_cluster_2': 9,
                                'pan_cluster_2--pan_cluster_3': 9,
                                'pan_cluster_2--pan_cluster_4': 1,
                                'pan_cluster_2--pan_cluster_6': 1,
                                'pan_cluster_3--pan_cluster_4': 9,
                                'pan_cluster_4--pan_cluster_5': 10,
                                'pan_cluster_5--pan_cluster_6': 10,
                                'pan_cluster_6--pan_cluster_1': 9}
        core_graph = consesus_core_genome.construct_core_graph(core_neighbour_pairs)
        num_components = number_connected_components(core_graph)
        double_edge_segements = consesus_core_genome.identify_segments(core_graph, 10, {}, num_components)
        self.assertEqual(expected_segments, double_edge_segements)

    def test_double_edge_segment_identification_segments_node_w_challenging_paths(self):
        """Multiple alternative paths between end points; five genomes, threshold 5."""
        expected_segments = {'pan_cluster_A--pan_cluster_B': ['pan_cluster_A', 'pan_cluster_E', 'pan_cluster_F', 'pan_cluster_G', 'pan_cluster_B'],
                             'pan_cluster_B--pan_cluster_C': ['pan_cluster_C', 'pan_cluster_B']}
        core_neighbour_pairs = {'pan_cluster_A--pan_cluster_C': 4,
                                'pan_cluster_A--pan_cluster_D': 4,
                                'pan_cluster_A--pan_cluster_E': 2,
                                'pan_cluster_B--pan_cluster_C': 5,
                                'pan_cluster_B--pan_cluster_D': 3,
                                'pan_cluster_B--pan_cluster_G': 2,
                                'pan_cluster_C--pan_cluster_D': 1,
                                'pan_cluster_E--pan_cluster_F': 2,
                                'pan_cluster_F--pan_cluster_G': 2,
                                }
        core_gene_dict = {'genome_1': {'tag_4': 'pan_cluster_D', 'tag_3': 'pan_cluster_C', 'tag_2': 'pan_cluster_B', 'tag_1': 'pan_cluster_A', },
                          'genome_2': {'tag_4': 'pan_cluster_D', 'tag_3': 'pan_cluster_C', 'tag_2': 'pan_cluster_B', 'tag_1': 'pan_cluster_A', },
                          'genome_3': {'tag_4': 'pan_cluster_D', 'tag_3': 'pan_cluster_C', 'tag_2': 'pan_cluster_B', 'tag_1': 'pan_cluster_A', },
                          'genome_4': {'tag_4': 'pan_cluster_D', 'tag_3': 'pan_cluster_C', 'tag_2': 'pan_cluster_B', 'tag_1': 'pan_cluster_A', },
                          'genome_5': {'tag_4': 'pan_cluster_D', 'tag_3': 'pan_cluster_C', 'tag_2': 'pan_cluster_B', 'tag_1': 'pan_cluster_A', }}
        core_graph = consesus_core_genome.construct_core_graph(core_neighbour_pairs)
        num_components = number_connected_components(core_graph)
        double_edge_segements = consesus_core_genome.identify_segments(core_graph, 5, core_gene_dict, num_components)
        self.assertEqual(expected_segments, double_edge_segements)

    def test_double_edge_segment_identification_segments_node_w_challenging_paths_2(self):
        """A second challenging-path layout; eight genomes, threshold 8."""
        expected_segments = {'pan_cluster_A--pan_cluster_B': ['pan_cluster_A', 'pan_cluster_F', 'pan_cluster_B'],
                             'pan_cluster_B--pan_cluster_C': ['pan_cluster_B', 'pan_cluster_I', 'pan_cluster_C']}
        core_neighbour_pairs = {'pan_cluster_A--pan_cluster_D': 2,
                                'pan_cluster_A--pan_cluster_E': 1,
                                'pan_cluster_A--pan_cluster_F': 7,
                                'pan_cluster_B--pan_cluster_F': 7,
                                'pan_cluster_B--pan_cluster_I': 8,
                                'pan_cluster_B--pan_cluster_D': 1,
                                'pan_cluster_C--pan_cluster_E': 1,
                                'pan_cluster_C--pan_cluster_D': 1,
                                'pan_cluster_C--pan_cluster_I': 8,
                                'pan_cluster_D--pan_cluster_E': 1
                                }
        core_gene_dict = {'genome_1': {'tag_5': 'pan_cluster_E', 'tag_4': 'pan_cluster_D', 'tag_3': 'pan_cluster_C', 'tag_2': 'pan_cluster_B', 'tag_1': 'pan_cluster_A', },
                          'genome_2': {'tag_5': 'pan_cluster_E', 'tag_4': 'pan_cluster_D', 'tag_3': 'pan_cluster_C', 'tag_2': 'pan_cluster_B', 'tag_1': 'pan_cluster_A', },
                          'genome_3': {'tag_5': 'pan_cluster_E', 'tag_4': 'pan_cluster_D', 'tag_3': 'pan_cluster_C', 'tag_2': 'pan_cluster_B', 'tag_1': 'pan_cluster_A', },
                          'genome_4': {'tag_5': 'pan_cluster_E', 'tag_4': 'pan_cluster_D', 'tag_3': 'pan_cluster_C', 'tag_2': 'pan_cluster_B', 'tag_1': 'pan_cluster_A', },
                          'genome_5': {'tag_5': 'pan_cluster_E', 'tag_4': 'pan_cluster_D', 'tag_3': 'pan_cluster_C', 'tag_2': 'pan_cluster_B', 'tag_1': 'pan_cluster_A', },
                          'genome_6': {'tag_5': 'pan_cluster_E', 'tag_4': 'pan_cluster_D', 'tag_3': 'pan_cluster_C', 'tag_2': 'pan_cluster_B', 'tag_1': 'pan_cluster_A', },
                          'genome_7': {'tag_5': 'pan_cluster_E', 'tag_4': 'pan_cluster_D', 'tag_3': 'pan_cluster_C', 'tag_2': 'pan_cluster_B', 'tag_1': 'pan_cluster_A', },
                          'genome_8': {'tag_5': 'pan_cluster_E', 'tag_4': 'pan_cluster_D', 'tag_3': 'pan_cluster_C', 'tag_2': 'pan_cluster_B', 'tag_1': 'pan_cluster_A', }}
        core_graph = consesus_core_genome.construct_core_graph(core_neighbour_pairs)
        num_components = number_connected_components(core_graph)
        double_edge_segements = consesus_core_genome.identify_segments(core_graph, 8, core_gene_dict, num_components)
        self.assertEqual(expected_segments, double_edge_segements)

    def test_double_edge_segment_identification_segments_node_w_all_challenging_paths(self):
        """Every path between end points is ambiguous; two segments expected."""
        expected_segments = {'pan_cluster_A--pan_cluster_D': ['pan_cluster_A', 'pan_cluster_G', 'pan_cluster_F', 'pan_cluster_E', 'pan_cluster_D'],
                             'pan_cluster_B--pan_cluster_C': ['pan_cluster_B', 'pan_cluster_H', 'pan_cluster_I', 'pan_cluster_J', 'pan_cluster_C']}#,}
        core_neighbour_pairs = {'pan_cluster_A--pan_cluster_B': 4,
                                'pan_cluster_A--pan_cluster_K': 4,
                                'pan_cluster_A--pan_cluster_G': 2,
                                'pan_cluster_B--pan_cluster_H': 2,
                                'pan_cluster_B--pan_cluster_L': 4,
                                'pan_cluster_C--pan_cluster_J': 2,
                                'pan_cluster_C--pan_cluster_K': 4,
                                'pan_cluster_D--pan_cluster_C': 4,
                                'pan_cluster_D--pan_cluster_L': 4,
                                'pan_cluster_D--pan_cluster_E': 2,
                                'pan_cluster_E--pan_cluster_F': 2,
                                'pan_cluster_F--pan_cluster_G': 2,
                                'pan_cluster_H--pan_cluster_I': 2,
                                'pan_cluster_I--pan_cluster_J': 2,
                                'pan_cluster_K--pan_cluster_L': 1
                                }
        # NOTE(review): tag_1 and tag_3 both map to pan_cluster_A in every genome;
        # presumably deliberate test data, but worth confirming.
        core_gene_dict = {'genome_1': {'tag_5': 'pan_cluster_K', 'tag_4': 'pan_cluster_L', 'tag_3': 'pan_cluster_A', 'tag_2': 'pan_cluster_B', 'tag_1': 'pan_cluster_A', 'tag_6': 'pan_cluster_C', 'tag_7': 'pan_cluster_D'},
                          'genome_2': {'tag_5': 'pan_cluster_K', 'tag_4': 'pan_cluster_L', 'tag_3': 'pan_cluster_A', 'tag_2': 'pan_cluster_B', 'tag_1': 'pan_cluster_A', 'tag_6': 'pan_cluster_C', 'tag_7': 'pan_cluster_D'},
                          'genome_3': {'tag_5': 'pan_cluster_K', 'tag_4': 'pan_cluster_L', 'tag_3': 'pan_cluster_A', 'tag_2': 'pan_cluster_B', 'tag_1': 'pan_cluster_A', 'tag_6': 'pan_cluster_C', 'tag_7': 'pan_cluster_D'},
                          'genome_4': {'tag_5': 'pan_cluster_K', 'tag_4': 'pan_cluster_L', 'tag_3': 'pan_cluster_A', 'tag_2': 'pan_cluster_B', 'tag_1': 'pan_cluster_A', 'tag_6': 'pan_cluster_C', 'tag_7': 'pan_cluster_D'},
                          'genome_5': {'tag_5': 'pan_cluster_K', 'tag_4': 'pan_cluster_L', 'tag_3': 'pan_cluster_A', 'tag_2': 'pan_cluster_B', 'tag_1': 'pan_cluster_A', 'tag_6': 'pan_cluster_C', 'tag_7': 'pan_cluster_D'}}
        core_graph = consesus_core_genome.construct_core_graph(core_neighbour_pairs)
        num_components = number_connected_components(core_graph)
        double_edge_segements = consesus_core_genome.identify_segments(core_graph, 5, core_gene_dict, num_components)
        self.assertEqual(expected_segments, double_edge_segements)

    def test_double_edge_segment_identification_segments_node_w_less_than_all_present(self):
        """Pairs seen in fewer than all genomes still yield four segments."""
        expected_segments = {'pan_cluster_B--pan_cluster_D': ['pan_cluster_B', 'pan_cluster_C', 'pan_cluster_D'],
                             'pan_cluster_F--pan_cluster_H': ['pan_cluster_H', 'pan_cluster_G', 'pan_cluster_F'],
                             'pan_cluster_B--pan_cluster_H': ['pan_cluster_B', 'pan_cluster_A', 'pan_cluster_H'],
                             'pan_cluster_D--pan_cluster_F': ['pan_cluster_D', 'pan_cluster_E', 'pan_cluster_F']}
        # NOTE(review): both 'pan_cluster_A--pan_cluster_H' and the reversed
        # 'pan_cluster_H--pan_cluster_A' appear as keys (each with count 9) -
        # confirm construct_core_graph handles reversed duplicates as intended.
        core_neighbour_pairs = {'pan_cluster_A--pan_cluster_B': 9,
                                'pan_cluster_A--pan_cluster_H': 9,
                                'pan_cluster_B--pan_cluster_H': 1,
                                'pan_cluster_B--pan_cluster_C': 10,
                                'pan_cluster_C--pan_cluster_D': 10,
                                'pan_cluster_D--pan_cluster_E': 9,
                                'pan_cluster_D--pan_cluster_F': 1,
                                'pan_cluster_E--pan_cluster_F': 9,
                                'pan_cluster_F--pan_cluster_G': 10,
                                'pan_cluster_G--pan_cluster_H': 10,
                                'pan_cluster_H--pan_cluster_A': 9,
                                }
        core_graph = consesus_core_genome.construct_core_graph(core_neighbour_pairs)
        num_components = number_connected_components(core_graph)
        double_edge_segements = consesus_core_genome.identify_segments(core_graph, 10, {}, num_components)
        self.assertEqual(expected_segments, double_edge_segements)

    def test_double_edge_segment_identification_segments_node_w_two_gene_segment(self):
        """One expected segment consists of just its two end-point genes (A--B)."""
        expected_segments = {'pan_cluster_A--pan_cluster_B': ['pan_cluster_A', 'pan_cluster_B'],
                             'pan_cluster_A--pan_cluster_G': ['pan_cluster_A', 'pan_cluster_I', 'pan_cluster_H', 'pan_cluster_G'],
                             'pan_cluster_B--pan_cluster_E': ['pan_cluster_B', 'pan_cluster_C', 'pan_cluster_D', 'pan_cluster_E'],
                             'pan_cluster_E--pan_cluster_G': ['pan_cluster_G', 'pan_cluster_F', 'pan_cluster_E']}
        core_neighbour_pairs = {'pan_cluster_A--pan_cluster_B': 3,
                                'pan_cluster_A--pan_cluster_I': 2,
                                'pan_cluster_A--pan_cluster_G': 1,
                                'pan_cluster_B--pan_cluster_C': 2,
                                'pan_cluster_B--pan_cluster_E': 1,
                                'pan_cluster_C--pan_cluster_D': 3,
                                'pan_cluster_D--pan_cluster_E': 3,
                                'pan_cluster_E--pan_cluster_F': 2,
                                'pan_cluster_F--pan_cluster_G': 2,
                                'pan_cluster_G--pan_cluster_H': 3,
                                'pan_cluster_H--pan_cluster_I': 3
                                }
        core_gene_dict = {'genome_1': {'gene_1': 'pan_cluster_A', 'gene_2': 'pan_cluster_B', 'gene_3': 'pan_cluster_E', 'gene_4': 'pan_cluster_G', 'gene_5': 'pan_cluster_D', 'gene_7': 'pan_cluster_H'},
                          'genome_2': {'gene_1': 'pan_cluster_A', 'gene_2': 'pan_cluster_B', 'gene_3': 'pan_cluster_E', 'gene_4': 'pan_cluster_G', 'gene_5': 'pan_cluster_D', 'gene_7': 'pan_cluster_H'},
                          'genome_3': {'gene_1': 'pan_cluster_A', 'gene_2': 'pan_cluster_B', 'gene_3': 'pan_cluster_E', 'gene_4': 'pan_cluster_G', 'gene_5': 'pan_cluster_D', 'gene_7': 'pan_cluster_H'}}
        core_graph = consesus_core_genome.construct_core_graph(core_neighbour_pairs)
        num_components = number_connected_components(core_graph)
        double_edge_segements = consesus_core_genome.identify_segments(core_graph, 3, core_gene_dict, num_components)
        self.assertEqual(expected_segments, double_edge_segements)

    def test_multiple_component_core_graph(self):
        """Segments are identified per connected component and merged into one dict."""
        expected_segments = {'pan_cluster_A--pan_cluster_I': ['pan_cluster_A', 'pan_cluster_I'],
                             'pan_cluster_B--pan_cluster_C': ['pan_cluster_C', 'pan_cluster_B'],
                             'pan_cluster_D--pan_cluster_J': ['pan_cluster_D', 'pan_cluster_J'],
                             'pan_cluster_E--pan_cluster_K': ['pan_cluster_E', 'pan_cluster_K'],
                             'pan_cluster_F--pan_cluster_G': ['pan_cluster_G', 'pan_cluster_F'],
                             'pan_cluster_H--pan_cluster_M': ['pan_cluster_H', 'pan_cluster_L', 'pan_cluster_M'],
                             'pan_cluster_Q--pan_cluster_O': ['pan_cluster_Q', 'pan_cluster_P', 'pan_cluster_O']}
        core_neighbour_pairs = {'pan_cluster_A--pan_cluster_B': 1,
                                'pan_cluster_A--pan_cluster_C': 1,
                                'pan_cluster_A--pan_cluster_I': 2,
                                'pan_cluster_B--pan_cluster_C': 2,
                                'pan_cluster_B--pan_cluster_D': 1,
                                'pan_cluster_C--pan_cluster_D': 1,
                                'pan_cluster_D--pan_cluster_J': 2,
                                'pan_cluster_E--pan_cluster_F': 1,
                                'pan_cluster_E--pan_cluster_G': 1,
                                'pan_cluster_E--pan_cluster_K': 2,
                                'pan_cluster_F--pan_cluster_G': 2,
                                'pan_cluster_F--pan_cluster_H': 1,
                                'pan_cluster_G--pan_cluster_H': 1,
                                'pan_cluster_H--pan_cluster_L': 2,
                                'pan_cluster_L--pan_cluster_M': 2,
                                'pan_cluster_O--pan_cluster_P': 2,
                                'pan_cluster_P--pan_cluster_Q': 2,
                                }
        core_gene_dict = {'genome_1': {'tag_1': 'pan_cluster_A', 'tag_2': 'pan_cluster_B', 'tag_3': 'pan_cluster_C',
                                       'tag_4': 'pan_cluster_D', 'tag_5': 'pan_cluster_E', 'tag_6': 'pan_cluster_F',
                                       'tag_7': 'pan_cluster_G', 'tag_8': 'pan_cluster_H', 'tag_9': 'pan_cluster_I',
                                       'tag_10': 'pan_cluster_J', 'tag_11': 'pan_cluster_K', 'tag_12': 'pan_cluster_L',
                                       'tag_13': 'pan_cluster_M', 'tag_14': 'pan_cluster_O', 'tag_15': 'pan_cluster_P',
                                       'tag_16': 'pan_cluster_Q'},
                          'genome_2': {'tag_1': 'pan_cluster_A', 'tag_2': 'pan_cluster_B', 'tag_3': 'pan_cluster_C',
                                       'tag_4': 'pan_cluster_D', 'tag_5': 'pan_cluster_E', 'tag_6': 'pan_cluster_F',
                                       'tag_7': 'pan_cluster_G', 'tag_8': 'pan_cluster_H', 'tag_9': 'pan_cluster_I',
                                       'tag_10': 'pan_cluster_J', 'tag_11': 'pan_cluster_K', 'tag_12': 'pan_cluster_L',
                                       'tag_13': 'pan_cluster_M', 'tag_14': 'pan_cluster_O', 'tag_15': 'pan_cluster_P',
                                       'tag_16': 'pan_cluster_Q'}}
        core_graph = consesus_core_genome.construct_core_graph(core_neighbour_pairs)
        num_components = number_connected_components(core_graph)
        double_edge_segements = {}
        # Run segment identification on each connected component and merge results.
        for component in connected_components(core_graph):
            component_graph = core_graph.subgraph(component).copy()
            # The '|' dict-union operator requires Python 3.9+.
            double_edge_segements = double_edge_segements | consesus_core_genome.identify_segments(component_graph, 2,
                                                                                                   core_gene_dict,
                                                                                                   num_components)
        # comparisons = [True for x in double_edge_segements
        #                if
        #                (x in expected_segments and
        #                 (expected_segments[x] == double_edge_segements[x] or expected_segments[x][::-1] == double_edge_segements[x]))
        #                or
        #                (f"{x.split('--')[1]}'--'{x.split('--')[0]}" in expected_segments and
        #                 (expected_segments[x] == double_edge_segements[f"{x.split('--')[1]}'--'{x.split('--')[0]}"] or expected_segments[x][::-1] == double_edge_segements[f"{x.split('--')[1]}'--'{x.split('--')[0]}"]))
        #                ]
        # Returned segment keys may match the expected key in either orientation,
        # so collect both forward and reversed matches.
        key_forward = [x for x in double_edge_segements if x in expected_segments]
        key_reverse = [f"{x.split('--')[1]}--{x.split('--')[0]}" for x in double_edge_segements if f"{x.split('--')[1]}--{x.split('--')[0]}" in expected_segments]
        expected_key_match = key_forward+key_reverse
        # Test if the number of expected segments were returned
        self.assertEqual(len(expected_key_match), len(expected_segments))
        # NOTE(review): zipping assumes key_forward+key_reverse lines up with the
        # iteration order of double_edge_segements (i.e. forward-matching keys come
        # first) - holds for this fixture, but is worth confirming.
        comparisons = [True for returned_key, expected_key in zip(double_edge_segements, expected_key_match)
                       if double_edge_segements[returned_key] == expected_segments[expected_key]
                       or
                       double_edge_segements[returned_key] == expected_segments[expected_key][::-1]]
        # Test of all returned segments look as expected
        self.assertTrue(all(comparisons))
        # TODO - Chat to Andrew about this function how it works and how we can test it more - possibly just run some things to see if it breaks
class TestNoAccessorySegmentIdentifcation(unittest.TestCase):
    """
    Tests for dividing core-gene segments into sub-segments based on the
    accessory gene content found between neighbouring core genes.
    """

    def test_no_accessory_genes_in_segment(self):
        """With no accessory genes anywhere, each segment stays whole."""
        segments = {'pan_cluster_1--pan_cluster_5': ['pan_cluster_1', 'pan_cluster_6', 'pan_cluster_5'],
                    'pan_cluster_2--pan_cluster_4': ['pan_cluster_2', 'pan_cluster_3', 'pan_cluster_4']}
        acc_counts = {'pan_cluster_1--pan_cluster_6': 0, 'pan_cluster_5--pan_cluster_6': 0,
                      'pan_cluster_2--pan_cluster_3': 0, 'pan_cluster_3--pan_cluster_4': 0}
        result = consesus_core_genome.identify_no_accessory_segments(segments, acc_counts)
        self.assertEqual(result,
                         {'pan_cluster_1--pan_cluster_5': [['pan_cluster_1', 'pan_cluster_6', 'pan_cluster_5']],
                          'pan_cluster_2--pan_cluster_4': [['pan_cluster_2', 'pan_cluster_3', 'pan_cluster_4']]})

    def test_accessory_genes_in_segment_first_gene_lonely(self):
        """Accessory genes right after the first core gene isolate it in its own sub-segment."""
        segments = {'pan_cluster_1--pan_cluster_5': ['pan_cluster_1', 'pan_cluster_6', 'pan_cluster_5']}
        acc_counts = {'pan_cluster_1--pan_cluster_6': 1, 'pan_cluster_5--pan_cluster_6': 0}
        result = consesus_core_genome.identify_no_accessory_segments(segments, acc_counts)
        self.assertEqual(result,
                         {'pan_cluster_1--pan_cluster_5': [['pan_cluster_1'],
                                                           ['pan_cluster_6', 'pan_cluster_5']]})

    def test_accessory_genes_in_segment_last_gene_lonely(self):
        """Accessory genes right before the last core gene isolate it in its own sub-segment."""
        segments = {'pan_cluster_1--pan_cluster_5': ['pan_cluster_1', 'pan_cluster_6', 'pan_cluster_5'],
                    'pan_cluster_2--pan_cluster_4': ['pan_cluster_2', 'pan_cluster_3', 'pan_cluster_4']}
        acc_counts = {'pan_cluster_1--pan_cluster_6': 0, 'pan_cluster_5--pan_cluster_6': 1,
                      'pan_cluster_2--pan_cluster_3': 0, 'pan_cluster_3--pan_cluster_4': 0}
        result = consesus_core_genome.identify_no_accessory_segments(segments, acc_counts)
        self.assertEqual(result,
                         {'pan_cluster_1--pan_cluster_5': [['pan_cluster_1', 'pan_cluster_6'],
                                                           ['pan_cluster_5']],
                          'pan_cluster_2--pan_cluster_4': [['pan_cluster_2', 'pan_cluster_3', 'pan_cluster_4']]})

    def test_accessory_genes_in_segment_middle(self):
        """Accessory genes in the middle split the segment into two halves."""
        segments = {'pan_cluster_1--pan_cluster_4': ['pan_cluster_1', 'pan_cluster_2', 'pan_cluster_3', 'pan_cluster_4']}
        acc_counts = {'pan_cluster_1--pan_cluster_2': 0, 'pan_cluster_2--pan_cluster_3': 1,
                      'pan_cluster_3--pan_cluster_4': 0}
        result = consesus_core_genome.identify_no_accessory_segments(segments, acc_counts)
        self.assertEqual(result,
                         {'pan_cluster_1--pan_cluster_4': [['pan_cluster_1', 'pan_cluster_2'],
                                                           ['pan_cluster_3', 'pan_cluster_4']]})

    def test_accessory_genes_in_multiple_places(self):
        """Accessory genes at both ends split the segment into three sub-segments."""
        segments = {'pan_cluster_1--pan_cluster_4': ['pan_cluster_1', 'pan_cluster_2', 'pan_cluster_3', 'pan_cluster_4']}
        acc_counts = {'pan_cluster_1--pan_cluster_2': 1, 'pan_cluster_2--pan_cluster_3': 0,
                      'pan_cluster_3--pan_cluster_4': 1}
        result = consesus_core_genome.identify_no_accessory_segments(segments, acc_counts)
        self.assertEqual(result,
                         {'pan_cluster_1--pan_cluster_4': [['pan_cluster_1'],
                                                           ['pan_cluster_2', 'pan_cluster_3'],
                                                           ['pan_cluster_4']]})
class TestSummaryTableConstruction(unittest.TestCase):
    """Tests for summary_table.calculate_n_create_summaries.

    master_info values appear to follow the layout
    [genome, core_gene_1, core_gene_2, distance, accessory_count,
     accessory_genes, low_frequency_genes] — TODO confirm against the
    summary_table module.
    """

    def test_summary_table_calculations(self):
        # NOTE(review): three entries below have keys that disagree with their
        # values: 'pan_cluster_2--pan_cluster_4--genome_2' lists pan_cluster_3,
        # and both 'pan_cluster_3--pan_cluster_4' entries list
        # pan_cluster_2/pan_cluster_3. The writer test later in this file uses
        # consistent values, so these look like copy-paste slips — confirm
        # whether calculate_n_create_summaries groups by key only.
        master_info = {
            'pan_cluster_1--pan_cluster_2--genome_1': ['genome_1', 'pan_cluster_1', 'pan_cluster_2', 99, 3,
                                                       ['Acc_1', 'Acc_2'], ['low_1']],
            'pan_cluster_1--pan_cluster_2--genome_2': ['genome_2', 'pan_cluster_1', 'pan_cluster_2', 99, 3,
                                                       ['Acc_1', 'Acc_2'], ['low_1']],
            'pan_cluster_1--pan_cluster_2--genome_3': ['genome_3', 'pan_cluster_1', 'pan_cluster_2', 99, 3,
                                                       ['Acc_1', 'Acc_2'], ['low_1']],
            'pan_cluster_2--pan_cluster_3--genome_1': ['genome_1', 'pan_cluster_2', 'pan_cluster_3', 100, 2,
                                                       ['Acc_1', 'Acc_2'], []],
            'pan_cluster_2--pan_cluster_4--genome_2': ['genome_2', 'pan_cluster_2', 'pan_cluster_3', 150, 1,
                                                       ['Acc_1', ], []],
            'pan_cluster_2--pan_cluster_3--genome_3': ['genome_3', 'pan_cluster_2', 'pan_cluster_3', 200, 0,
                                                       [], []],
            'pan_cluster_3--pan_cluster_4--genome_1': ['genome_1', 'pan_cluster_2', 'pan_cluster_3', -5, 0,
                                                       [], []],
            'pan_cluster_3--pan_cluster_4--genome_3': ['genome_3', 'pan_cluster_2', 'pan_cluster_3', -10, 0,
                                                       [], []]
        }
        # Genome -> {gene id -> pan cluster} mapping; genome_2 lacks gene_3,
        # which drives the "n possible occurrences" columns below.
        core_gene_dict = {'genome_1': {'gene_1': 'pan_cluster_1', 'gene_2': 'pan_cluster_2', 'gene_3': 'pan_cluster_3', 'gene_4': 'pan_cluster_4'},
                          'genome_2': {'gene_1': 'pan_cluster_1', 'gene_2': 'pan_cluster_2', 'gene_4': 'pan_cluster_4'},
                          'genome_3': {'gene_1': 'pan_cluster_1', 'gene_2': 'pan_cluster_2', 'gene_3': 'pan_cluster_3', 'gene_4': 'pan_cluster_4'}}
        # Expected rows: [pair label, n observations, occurrences of each core
        # gene, co-occurrences, min/max/mean/median distance,
        # min/max/mean/median accessory count] — presumably; verify against
        # the summary_table column headers.
        expected_table = {'pan_cluster_1--pan_cluster_2': ['pan_cluster_1-pan_cluster_2', 3, 3, 3, 3, 99, 99, 99.0, 99.0, 3, 3, 3.0, 3.0],
                          'pan_cluster_2--pan_cluster_3': ['pan_cluster_2-pan_cluster_3', 2, 3, 2, 2, 100, 200, 150.0, 150.0, 0, 2, 1.0, 1.0],
                          'pan_cluster_2--pan_cluster_4': ['pan_cluster_2-pan_cluster_4', 1, 3, 3, 3, 150, 150, 150.0, 150.0, 1, 1, 1.0, 1.0],
                          'pan_cluster_3--pan_cluster_4': ['pan_cluster_3-pan_cluster_4', 2, 2, 3, 2, -10, -5, -7.5, -7.5, 0, 0, 0.0, 0.0]}

        return_table = summary_table.calculate_n_create_summaries(master_info, core_gene_dict)

        self.assertEqual(expected_table, return_table)

    def test_summary_table_calculations_w_sequence_breaks(self):
        """Pairs involving 'Sequence_break' should still be summarised,
        with zero occurrences recorded for the break pseudo-gene."""
        master_info = {
            'pan_cluster_1--pan_cluster_2--genome_1': ['genome_1', 'pan_cluster_1', 'pan_cluster_2', 99, 3,
                                                       ['Acc_1', 'Acc_2'], ['low_1']],
            'pan_cluster_1--Sequence_break--genome_2': ['genome_2', 'pan_cluster_1', 'Sequence_break', 100, 0,
                                                        [], []],
            'Sequence_break--pan_cluster_2--genome_2': ['genome_2', 'Sequence_break', 'pan_cluster_2', 99, 3,
                                                        ['Acc_1', 'Acc_2'], ['low_1']],
            'pan_cluster_1--pan_cluster_2--genome_3': ['genome_3', 'pan_cluster_1', 'pan_cluster_2', 99, 3,
                                                       ['Acc_1', 'Acc_2'], ['low_1']],
            'pan_cluster_2--pan_cluster_3--genome_1': ['genome_1', 'pan_cluster_2', 'pan_cluster_3', 100, 2,
                                                       ['Acc_1', 'Acc_2'], []],
            'pan_cluster_2--pan_cluster_3--genome_3': ['genome_3', 'pan_cluster_2', 'pan_cluster_3', 200, 0,
                                                       [], []],
            'pan_cluster_3--Sequence_break--genome_1': ['genome_1', 'pan_cluster_3', 'Sequence_break', 100, 2,
                                                        ['Acc_1', 'Acc_2'], []],
            'pan_cluster_3--Sequence_break--genome_3': ['genome_3', 'pan_cluster_3', 'Sequence_break', 200, 0,
                                                        [], []]
        }
        core_gene_dict = {'genome_1': {'gene_1': 'pan_cluster_1', 'gene_2': 'pan_cluster_2', 'gene_3': 'pan_cluster_3'},
                          'genome_2': {'gene_1': 'pan_cluster_1', 'gene_2': 'pan_cluster_2'},
                          'genome_3': {'gene_1': 'pan_cluster_1', 'gene_2': 'pan_cluster_2', 'gene_3': 'pan_cluster_3'}}
        expected_table = {'pan_cluster_1--pan_cluster_2': ['pan_cluster_1-pan_cluster_2', 2, 3, 3, 3, 99, 99, 99.0, 99.0, 3, 3, 3.0, 3.0],
                          'pan_cluster_1--Sequence_break': ['pan_cluster_1-Sequence_break', 1, 3, 0, 0, 100, 100, 100.0, 100.0, 0, 0, 0.0, 0.0],
                          'Sequence_break--pan_cluster_2': ['Sequence_break-pan_cluster_2', 1, 0, 3, 0, 99, 99, 99.0, 99.0, 3, 3, 3.0, 3.0],
                          'pan_cluster_2--pan_cluster_3': ['pan_cluster_2-pan_cluster_3', 2, 3, 2, 2, 100, 200, 150.0, 150.0, 0, 2, 1.0, 1.0],
                          'pan_cluster_3--Sequence_break': ['pan_cluster_3-Sequence_break', 2, 2, 0, 0, 100, 200, 150.0, 150.0, 0, 2, 1.0, 1.0]}

        return_table = summary_table.calculate_n_create_summaries(master_info, core_gene_dict)

        self.assertEqual(expected_table, return_table)
class TestWritingOutputFunction(unittest.TestCase):
    """
    Tests for the output-writer functions.

    Each test writes a .tsv/.csv file into the 'TestWritingOutputFunction'
    folder and compares it line-by-line against a pre-made expected file
    that must already exist in that folder.
    """

    def tearDown(self):
        """Remove the files produced by each test from the fixture folder."""
        for file in os.listdir('TestWritingOutputFunction'):
            # Matches the generated .tsv and .csv files (any name containing
            # "sv"); the hand-made expected .txt fixtures are kept.
            if "sv" in file:
                db_path = os.path.join('TestWritingOutputFunction', file)
                os.remove(db_path)

    def test_master_info_writer(self):
        """master_info_writer should emit the low-frequency placement .tsv and
        the gene-content .tsv matching the expected fixture files."""
        # [genome, core_gene_1, core_gene_2, distance, accessory_count,
        #  accessory_genes, low_frequency_genes] — assumed layout, same as the
        # summary-table tests above.
        master_info = {
            'pan_cluster_1--pan_cluster_2--genome_1': ['genome_1', 'pan_cluster_1', 'pan_cluster_2', 99, 3,
                                                       ['Acc_1', 'Acc_2'], ['low_1']],
            'pan_cluster_1--pan_cluster_2--genome_2': ['genome_2', 'pan_cluster_1', 'pan_cluster_2', 99, 3,
                                                       ['Acc_1', 'Acc_2'], ['low_1']],
            'pan_cluster_1--pan_cluster_2--genome_3': ['genome_3', 'pan_cluster_1', 'pan_cluster_2', 99, 3,
                                                       ['Acc_1', 'Acc_2'], ['low_1']],
            'pan_cluster_2--pan_cluster_3--genome_1': ['genome_1', 'pan_cluster_2', 'pan_cluster_3', 100, 2,
                                                       ['Acc_1', 'Acc_2'], []],
            'pan_cluster_2--pan_cluster_4--genome_2': ['genome_2', 'pan_cluster_2', 'pan_cluster_4', 150, 1,
                                                       ['Acc_1', ], []],
            'pan_cluster_2--pan_cluster_3--genome_3': ['genome_3', 'pan_cluster_2', 'pan_cluster_3', 200, 0,
                                                       [], []],
            'pan_cluster_3--pan_cluster_4--genome_1': ['genome_1', 'pan_cluster_3', 'pan_cluster_4', -5, 0,
                                                       [], []],
            'pan_cluster_3--pan_cluster_4--genome_3': ['genome_3', 'pan_cluster_3', 'pan_cluster_4', -10, 0,
                                                       [], []]
        }
        out_path = 'TestWritingOutputFunction'
        prefix = 'test'
        # Pre-made expected fixture files.
        expected_low_freq = 'TestWritingOutputFunction/low_freq.txt'
        expected_gene_content = 'TestWritingOutputFunction/gene_content.txt'

        output_writer_functions.master_info_writer(master_info, out_path, prefix)

        with open(expected_low_freq, 'r') as expected:
            with open('TestWritingOutputFunction/test_low_frequency_gene_placement.tsv', 'r') as result:
                self.assertEqual(expected.readlines(), result.readlines())

        with open(expected_gene_content, 'r') as expected:
            with open('TestWritingOutputFunction/test_core_core_accessory_gene_content.tsv', 'r') as result:
                self.assertEqual(expected.readlines(), result.readlines())

    def test_summary_info_writer(self):
        """summary_info_writer should emit the core-pair summary .csv."""
        input_dict = {
            'pan_cluster_1--pan_cluster_2': ['pan_cluster_1-pan_cluster_2', 3, 3, 3, 3, 99, 99, 99.0, 99.0, 3, 3, 3.0,
                                             3.0],
            'pan_cluster_2--pan_cluster_3': ['pan_cluster_2-pan_cluster_3', 2, 3, 2, 2, 100, 200, 150.0, 150.0, 0, 2,
                                             1.0, 1.0],
            'pan_cluster_2--pan_cluster_4': ['pan_cluster_2-pan_cluster_4', 1, 3, 3, 3, 150, 150, 150.0, 150.0, 1, 1,
                                             1.0, 1.0],
            'pan_cluster_3--pan_cluster_4': ['pan_cluster_3-pan_cluster_4', 2, 2, 3, 2, -10, -5, -7.5, -7.5, 0, 0, 0.0,
                                             0.0]}
        out_path = 'TestWritingOutputFunction'
        prefix = 'test'
        expected_summary_table = 'TestWritingOutputFunction/summary_table.txt'

        output_writer_functions.summary_info_writer(input_dict, out_path, prefix)

        with open(expected_summary_table, 'r') as expected:
            with open('TestWritingOutputFunction/test_core_pair_summary.csv', 'r') as result:
                self.assertEqual(expected.readlines(), result.readlines())

    def test_segment_writer(self):
        """segment_writer should emit the core-segments .csv."""
        input_segments = {'pan_cluster_2--pan_cluster_4': ['pan_cluster_2',
                                                           'pan_cluster_3',
                                                           'pan_cluster_4'],
                          'pan_cluster_2--pan_cluster_6': ['pan_cluster_6',
                                                           'pan_cluster_1',
                                                           'pan_cluster_2'],
                          'pan_cluster_6--pan_cluster_4': ['pan_cluster_6',
                                                           'pan_cluster_5',
                                                           'pan_cluster_4']}
        out_path = 'TestWritingOutputFunction'
        prefix = 'test'
        expected_summary_table = 'TestWritingOutputFunction/core_segments.txt'

        output_writer_functions.segment_writer(input_segments, out_path, prefix)

        with open(expected_summary_table, 'r') as expected:
            with open('TestWritingOutputFunction/test_core_segments.csv', 'r') as result:
                self.assertEqual(expected.readlines(), result.readlines())

    def test_no_acc_segment_writer(self):
        """no_acc_segment_writer should emit the no-accessory segments .csv;
        values here are lists of sub-segments rather than flat lists."""
        input_segments = {'pan_cluster_2--pan_cluster_4': [['pan_cluster_2'],
                                                           ['pan_cluster_3',
                                                            'pan_cluster_4']],
                          'pan_cluster_6--pan_cluster_2': [['pan_cluster_2'],
                                                           ['pan_cluster_1'],
                                                           ['pan_cluster_6']],
                          'pan_cluster_6--pan_cluster_4': [['pan_cluster_6'],
                                                           ['pan_cluster_5',
                                                            'pan_cluster_4']
                                                           ]}
        out_path = 'TestWritingOutputFunction'
        prefix = 'test'
        expected_summary_table = 'TestWritingOutputFunction/no_acc_segments.txt'

        output_writer_functions.no_acc_segment_writer(input_segments, out_path, prefix)

        with open(expected_summary_table, 'r') as expected:
            with open('TestWritingOutputFunction/test_no_accessory_core_segments.csv', 'r') as result:
                self.assertEqual(expected.readlines(), result.readlines())

    def test_coreless_contig_writer(self):
        """non_core_contig_writer should emit the coreless-contig .tsv; each
        value is [accessory_genes, low_frequency_genes] for a contig."""
        coreless_contigs = {'gff_1--contig_x': [['pan_cluster_2'], ['pan_cluster_3']],
                            'gff_1--contig_y': [['pan_cluster_2'], []],
                            'gff_1--contig_z': [[], ['pan_cluster_6']]}
        out_path = 'TestWritingOutputFunction'
        prefix = 'test'
        expected_summary_table = 'TestWritingOutputFunction/no_core_contigs.txt'

        output_writer_functions.non_core_contig_writer(coreless_contigs, out_path, prefix)

        with open(expected_summary_table, 'r') as expected:
            with open('TestWritingOutputFunction/test_coreless_contig_accessory_gene_content.tsv', 'r') as result:
                self.assertEqual(expected.readlines(), result.readlines())
# Allow running this test module directly (python <file>) in addition to
# test-runner discovery.
if __name__ == '__main__':
    unittest.main()
| 64.467709
| 653
| 0.550035
| 27,598
| 251,553
| 4.484709
| 0.023045
| 0.066172
| 0.016224
| 0.014398
| 0.914712
| 0.899741
| 0.878015
| 0.867245
| 0.848613
| 0.836074
| 0
| 0.040166
| 0.335011
| 251,553
| 3,901
| 654
| 64.484235
| 0.699725
| 0.0271
| 0
| 0.750882
| 0
| 0.004489
| 0.31215
| 0.195766
| 0
| 0
| 0
| 0.001025
| 0.097467
| 1
| 0.042001
| false
| 0.001282
| 0.004809
| 0
| 0.055787
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0d49e46e02b7a2e802abfa3ffcfeaf2ce71bf765
| 138
|
py
|
Python
|
codesignal/arcade/python/intro_15_add_border.py
|
tinesife94/random
|
b802924dce4635ae074d30dc03962d4301bd6d8b
|
[
"MIT"
] | null | null | null |
codesignal/arcade/python/intro_15_add_border.py
|
tinesife94/random
|
b802924dce4635ae074d30dc03962d4301bd6d8b
|
[
"MIT"
] | null | null | null |
codesignal/arcade/python/intro_15_add_border.py
|
tinesife94/random
|
b802924dce4635ae074d30dc03962d4301bd6d8b
|
[
"MIT"
] | null | null | null |
def solution(picture):
    """Return *picture* surrounded by a one-character border of '*'.

    Parameters
    ----------
    picture : sequence of rows, where each row is a string or a sequence of
        single-character strings. Rows are assumed to be of equal length.

    Returns
    -------
    list[str]
        The framed picture: a top and bottom row of '*' of width
        ``len(row) + 2``, and each original row wrapped in '*...*'.

    The previous implementation built the frame with a double transpose
    (``zip(*...)`` applied twice), which was hard to follow and silently
    truncated ragged input. This version constructs the frame directly and
    produces identical output for rectangular pictures, including the
    empty-picture case (returns []).
    """
    if not picture:
        # zip(*[]) yields nothing, so the original also returned [] here.
        return []
    # Horizontal border is two characters wider than a row.
    horizontal = '*' * (len(picture[0]) + 2)
    framed_rows = ['*{}*'.format(''.join(row)) for row in picture]
    return [horizontal] + framed_rows + [horizontal]
| 34.5
| 58
| 0.528986
| 21
| 138
| 3.47619
| 0.52381
| 0.273973
| 0.30137
| 0.383562
| 0.547945
| 0.547945
| 0.547945
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 138
| 3
| 59
| 46
| 0.634783
| 0
| 0
| 0
| 0
| 0
| 0.057971
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
b4d1a33007c9d59ae5c013ed839d67f12e2f6011
| 11,911
|
py
|
Python
|
users/users/admin_api/tests/query/test_search_query.py
|
madEng84/pycon
|
31d71b7a86398a7b0518e909ae939aaffd181091
|
[
"MIT"
] | null | null | null |
users/users/admin_api/tests/query/test_search_query.py
|
madEng84/pycon
|
31d71b7a86398a7b0518e909ae939aaffd181091
|
[
"MIT"
] | null | null | null |
users/users/admin_api/tests/query/test_search_query.py
|
madEng84/pycon
|
31d71b7a86398a7b0518e909ae939aaffd181091
|
[
"MIT"
] | null | null | null |
from ward import test
from users.api.tests.graphql_client import admin_graphql_client
from users.tests.factories import user_factory, user_factory_batch
from users.tests.session import db
@test("search users by email")
async def _(
admin_graphql_client=admin_graphql_client, db=db, user_factory=user_factory
):
logged_user = await user_factory(
email="user@email.it",
fullname="Giorgina Giogio",
name="Giorgina",
is_staff=True,
)
admin_graphql_client.force_login(logged_user)
await user_factory(
email="another-email@email.it",
fullname="Giorgina Giogio",
name="Giorgina",
is_staff=True,
)
query = """query($query: String!) {
search(query: $query) {
users {
id
email
}
}
}"""
response = await admin_graphql_client.query(query, variables={"query": "user"})
assert not response.errors
assert response.data == {
"search": {"users": [{"id": logged_user.id, "email": logged_user.email}]}
}
@test("search users fuzzy multiple words")
async def _(
admin_graphql_client=admin_graphql_client, db=db, user_factory=user_factory
):
logged_user = await user_factory(
email="user@email.it",
fullname="Giorgina Giogio",
name="Giorgina",
is_staff=True,
)
admin_graphql_client.force_login(logged_user)
another_user = await user_factory(
email="another-email@email.it",
fullname="Giorgina Giogio",
name="Giorgina",
is_staff=True,
)
query = """query($query: String!) {
search(query: $query) {
users {
id
email
}
}
}"""
response = await admin_graphql_client.query(
query, variables={"query": "user email"}
)
assert not response.errors
assert {"id": logged_user.id, "email": logged_user.email} in response.data[
"search"
]["users"]
assert {"id": another_user.id, "email": another_user.email} in response.data[
"search"
]["users"]
@test("search users by fullname")
async def _(
admin_graphql_client=admin_graphql_client, db=db, user_factory=user_factory
):
logged_user = await user_factory(
email="user@email.it",
fullname="Giorgina Giogio",
name="Giorgina",
is_staff=True,
)
admin_graphql_client.force_login(logged_user)
another_user = await user_factory(
email="another-email@email.it",
fullname="Giorgina Buonofiglio",
name="Cattiva",
is_staff=False,
)
query = """query($query: String!) {
search(query: $query) {
users {
id
email
}
}
}"""
response = await admin_graphql_client.query(query, variables={"query": "Giorg"})
assert not response.errors
assert len(response.data["search"]["users"]) == 2
assert {"id": logged_user.id, "email": logged_user.email} in response.data[
"search"
]["users"]
assert {"id": another_user.id, "email": another_user.email} in response.data[
"search"
]["users"]
@test("search users empty result")
async def _(
admin_graphql_client=admin_graphql_client, db=db, user_factory=user_factory
):
logged_user = await user_factory(
email="user@email.it",
fullname="Giorgina Giogio",
name="Giorgina",
is_staff=True,
)
admin_graphql_client.force_login(logged_user)
await user_factory(
email="another-email@email.it",
fullname="Giorgina Buonofiglio",
name="Cattiva",
is_staff=False,
)
query = """query($query: String!) {
search(query: $query) {
users {
id
email
}
}
}"""
response = await admin_graphql_client.query(query, variables={"query": "Emerlinda"})
assert not response.errors
assert len(response.data["search"]["users"]) == 0
@test("search users with empty query returns nothing")
async def _(
admin_graphql_client=admin_graphql_client, db=db, user_factory=user_factory
):
logged_user = await user_factory(
email="user@email.it",
fullname="Giorgina Giogio",
name="Giorgina",
is_staff=True,
)
admin_graphql_client.force_login(logged_user)
await user_factory(
email="another-email@email.it",
fullname="Giorgina Buonofiglio",
name="Cattiva",
is_staff=False,
)
query = """query($query: String!) {
search(query: $query) {
users {
id
email
}
}
}"""
response = await admin_graphql_client.query(query, variables={"query": ""})
assert not response.errors
assert len(response.data["search"]["users"]) == 0
@test("cannot search users unlogged")
async def _(
admin_graphql_client=admin_graphql_client, db=db, user_factory=user_factory
):
await user_factory(
email="user@email.it",
fullname="Giorgina Giogio",
name="Giorgina",
is_staff=True,
)
await user_factory(
email="another-email@email.it",
fullname="Giorgina Buonofiglio",
name="Cattiva",
is_staff=False,
)
query = """query($query: String!) {
search(query: $query) {
users {
id
email
}
}
}"""
response = await admin_graphql_client.query(query, variables={"query": "Emerlinda"})
assert response.errors[0]["message"] == "Unauthorized"
assert not response.data
@test("cannot search users if not staff")
async def _(
admin_graphql_client=admin_graphql_client, db=db, user_factory=user_factory
):
logged_user = await user_factory(
email="user@email.it",
fullname="Giorgina Giogio",
name="Giorgina",
is_staff=False,
)
admin_graphql_client.force_login(logged_user)
await user_factory(
email="another-email@email.it",
fullname="Giorgina Buonofiglio",
name="Cattiva",
is_staff=False,
)
query = """query($query: String!) {
search(query: $query) {
users {
id
email
}
}
}"""
response = await admin_graphql_client.query(query, variables={"query": "Emerlinda"})
assert response.errors[0]["message"] == "Unauthorized"
assert not response.data
@test("search users only returns 10 matches")
async def _(
admin_graphql_client=admin_graphql_client,
db=db,
user_factory_batch=user_factory_batch,
user_factory=user_factory,
):
logged_user = await user_factory(
email="user@world.it",
fullname="Giorgina Giogio",
name="Giorgina",
is_staff=True,
)
admin_graphql_client.force_login(logged_user)
await user_factory_batch(
50,
email="another-email@email.it",
fullname="Giorgina Giogio",
name="Giorgina",
is_staff=True,
)
query = """query($query: String!) {
search(query: $query) {
users {
id
email
}
}
}"""
response = await admin_graphql_client.query(
query, variables={"query": "another email"}
)
assert not response.errors
assert len(response.data["search"]["users"]) == 10
@test("search users by fullname")
async def _(
admin_graphql_client=admin_graphql_client, db=db, user_factory=user_factory
):
logged_user = await user_factory(
email="user@email.it",
fullname="Giorgina Giogio",
name="Giorgina",
is_staff=True,
)
admin_graphql_client.force_login(logged_user)
another_user = await user_factory(
email="another-email@email.it",
fullname="Giorgina Buonofiglio",
name="Cattiva",
is_staff=False,
)
query = """query($query: String!) {
search(query: $query) {
users {
id
email
}
}
}"""
response = await admin_graphql_client.query(query, variables={"query": "Giorg"})
assert not response.errors
assert len(response.data["search"]["users"]) == 2
assert {"id": logged_user.id, "email": logged_user.email} in response.data[
"search"
]["users"]
assert {"id": another_user.id, "email": another_user.email} in response.data[
"search"
]["users"]
@test("search users empty result")
async def _(
admin_graphql_client=admin_graphql_client, db=db, user_factory=user_factory
):
logged_user = await user_factory(
email="user@email.it",
fullname="Giorgina Giogio",
name="Giorgina",
is_staff=True,
)
admin_graphql_client.force_login(logged_user)
await user_factory(
email="another-email@email.it",
fullname="Giorgina Buonofiglio",
name="Cattiva",
is_staff=False,
)
query = """query($query: String!) {
search(query: $query) {
users {
id
email
}
}
}"""
response = await admin_graphql_client.query(query, variables={"query": "Emerlinda"})
assert not response.errors
assert len(response.data["search"]["users"]) == 0
@test("search users with empty query returns nothing")
async def _(
admin_graphql_client=admin_graphql_client, db=db, user_factory=user_factory
):
logged_user = await user_factory(
email="user@email.it",
fullname="Giorgina Giogio",
name="Giorgina",
is_staff=True,
)
admin_graphql_client.force_login(logged_user)
await user_factory(
email="another-email@email.it",
fullname="Giorgina Buonofiglio",
name="Cattiva",
is_staff=False,
)
query = """query($query: String!) {
search(query: $query) {
users {
id
email
}
}
}"""
response = await admin_graphql_client.query(query, variables={"query": ""})
assert not response.errors
assert len(response.data["search"]["users"]) == 0
@test("cannot search users unlogged")
async def _(
admin_graphql_client=admin_graphql_client, db=db, user_factory=user_factory
):
await user_factory(
email="user@email.it",
fullname="Giorgina Giogio",
name="Giorgina",
is_staff=True,
)
await user_factory(
email="another-email@email.it",
fullname="Giorgina Buonofiglio",
name="Cattiva",
is_staff=False,
)
query = """query($query: String!) {
search(query: $query) {
users {
id
email
}
}
}"""
response = await admin_graphql_client.query(query, variables={"query": "Emerlinda"})
assert response.errors[0]["message"] == "Unauthorized"
assert not response.data
@test("cannot search users if not staff")
async def _(
admin_graphql_client=admin_graphql_client, db=db, user_factory=user_factory
):
logged_user = await user_factory(
email="user@email.it",
fullname="Giorgina Giogio",
name="Giorgina",
is_staff=False,
)
admin_graphql_client.force_login(logged_user)
await user_factory(
email="another-email@email.it",
fullname="Giorgina Buonofiglio",
name="Cattiva",
is_staff=False,
)
query = """query($query: String!) {
search(query: $query) {
users {
id
email
}
}
}"""
response = await admin_graphql_client.query(query, variables={"query": "Emerlinda"})
assert response.errors[0]["message"] == "Unauthorized"
assert not response.data
| 25.893478
| 88
| 0.592561
| 1,288
| 11,911
| 5.284161
| 0.054348
| 0.090508
| 0.134881
| 0.077138
| 0.957831
| 0.950485
| 0.945636
| 0.945636
| 0.940641
| 0.940641
| 0
| 0.001879
| 0.285031
| 11,911
| 459
| 89
| 25.949891
| 0.797323
| 0
| 0
| 0.80916
| 0
| 0
| 0.322139
| 0.024011
| 0
| 0
| 0
| 0
| 0.07888
| 1
| 0
| false
| 0
| 0.010178
| 0
| 0.010178
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b4ec25bf20bb83e7f0be19f8169f15a34b6389aa
| 848
|
py
|
Python
|
tests/test_77.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
tests/test_77.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
tests/test_77.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import pytest
"""
Test 77. Combinations
"""
@pytest.fixture(scope="session")
def init_variables_77():
from src.leetcode_77_combinations import Solution
solution = Solution()
def _init_variables_77():
return solution
yield _init_variables_77
class TestClass77:
    def test_solution_0(self, init_variables_77):
        # NOTE(review): combinations of [1..4] choose 2 has 6 results, so
        # [[1]] looks like a placeholder/failing expectation left from a
        # template — verify against Solution.combine's contract.
        assert init_variables_77().combine(4, 2) == [[1]]
#!/usr/bin/env python
import pytest
"""
Test 77. Combinations
"""
@pytest.fixture(scope="session")
def init_variables_77():
from src.leetcode_77_combinations import Solution
solution = Solution()
def _init_variables_77():
return solution
yield _init_variables_77
# NOTE(review): duplicate of the TestClass77 defined above (same class name,
# so this redefinition shadows the first) — confirm and deduplicate.
class TestClass77:
    def test_solution_0(self, init_variables_77):
        assert init_variables_77().combine(4, 2) == [[1]]
| 16.627451
| 57
| 0.700472
| 108
| 848
| 5.203704
| 0.277778
| 0.231317
| 0.266904
| 0.128114
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0.058309
| 0.191038
| 848
| 50
| 58
| 16.96
| 0.760933
| 0.04717
| 0
| 1
| 0
| 0
| 0.018717
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 1
| 0.272727
| false
| 0
| 0.181818
| 0.090909
| 0.636364
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
3704b5995bd56bd15f221b0726586fbb5f80a84c
| 193
|
py
|
Python
|
datacatalog/linkedstores/pipeline/__init__.py
|
SD2E/python-datacatalog
|
51ab366639505fb6e8a14cd6b446de37080cd20d
|
[
"CNRI-Python"
] | null | null | null |
datacatalog/linkedstores/pipeline/__init__.py
|
SD2E/python-datacatalog
|
51ab366639505fb6e8a14cd6b446de37080cd20d
|
[
"CNRI-Python"
] | 2
|
2019-07-25T15:39:04.000Z
|
2019-10-21T15:31:46.000Z
|
datacatalog/linkedstores/pipeline/__init__.py
|
SD2E/python-datacatalog
|
51ab366639505fb6e8a14cd6b446de37080cd20d
|
[
"CNRI-Python"
] | 1
|
2019-10-15T14:33:44.000Z
|
2019-10-15T14:33:44.000Z
|
from .store import PipelineDocument, PipelineStore, StoreInterface
from .store import PipelineUpdateFailure, PipelineCreateFailure, DuplicatePipelineError
from .store import SerializedPipeline
| 48.25
| 87
| 0.880829
| 16
| 193
| 10.625
| 0.625
| 0.158824
| 0.264706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082902
| 193
| 3
| 88
| 64.333333
| 0.960452
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2ec8076f34383919e8ff9b0ea7d6e8fd2adb0f92
| 143
|
py
|
Python
|
Pyflai/style/__init__.py
|
CMakerA/Pyflai
|
4c3ed66d0d046de56c4bf6a04cb6ce8ebdf34693
|
[
"Apache-2.0"
] | 1
|
2018-02-14T08:33:25.000Z
|
2018-02-14T08:33:25.000Z
|
Pyflai/style/__init__.py
|
CMakerA/Pyflai
|
4c3ed66d0d046de56c4bf6a04cb6ce8ebdf34693
|
[
"Apache-2.0"
] | null | null | null |
Pyflai/style/__init__.py
|
CMakerA/Pyflai
|
4c3ed66d0d046de56c4bf6a04cb6ce8ebdf34693
|
[
"Apache-2.0"
] | null | null | null |
__all__ = ["color", "font"]
from Pyflai.style.color.Color import *
from Pyflai.style.font.Font import *
from Pyflai.style.font.Fonts import *
| 23.833333
| 38
| 0.741259
| 21
| 143
| 4.857143
| 0.380952
| 0.294118
| 0.441176
| 0.411765
| 0.490196
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118881
| 143
| 5
| 39
| 28.6
| 0.809524
| 0
| 0
| 0
| 0
| 0
| 0.062937
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2ee549a3183b573b0b024e9d7206d23476483425
| 94,072
|
py
|
Python
|
code/python/FactSetESG/v1/fds/sdk/FactSetESG/api/sdg_api.py
|
factset/enterprise-sdk
|
3fd4d1360756c515c9737a0c9a992c7451d7de7e
|
[
"Apache-2.0"
] | 6
|
2022-02-07T16:34:18.000Z
|
2022-03-30T08:04:57.000Z
|
code/python/FactSetESG/v1/fds/sdk/FactSetESG/api/sdg_api.py
|
factset/enterprise-sdk
|
3fd4d1360756c515c9737a0c9a992c7451d7de7e
|
[
"Apache-2.0"
] | 2
|
2022-02-07T05:25:57.000Z
|
2022-03-07T14:18:04.000Z
|
code/python/FactSetESG/v1/fds/sdk/FactSetESG/api/sdg_api.py
|
factset/enterprise-sdk
|
3fd4d1360756c515c9737a0c9a992c7451d7de7e
|
[
"Apache-2.0"
] | null | null | null |
"""
FactSet ESG API
FactSet ESG (powered by FactSet Truvalue Labs) applies machine learning to uncover risks and opportunities from companies' Environmental, Social and Governance (ESG) behavior, which are aggregated and categorized into continuously updated, material ESG scores. The service focuses on company ESG behavior from external sources and includes both positive and negative events that go beyond traditional sources of ESG risk data.<p> FactSet ESG extracts, analyzes, and generates scores from millions of documents each month collected from more than 100,000 data sources in over 13 languages. Sources include news, trade journals, NGOs, watchdog groups, trade blogs, industry reports and social media. Products deliver investable insights by revealing value and risk factors from unstructured data at the speed of current events.</p> # noqa: E501
The version of the OpenAPI document: 1.3.0
Contact: api@factset.com
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from multiprocessing.pool import ApplyResult
import typing
from fds.sdk.FactSetESG.api_client import ApiClient, Endpoint as _Endpoint
from fds.sdk.FactSetESG.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from fds.sdk.FactSetESG.exceptions import ApiException
from fds.sdk.FactSetESG.model.error_response import ErrorResponse
from fds.sdk.FactSetESG.model.sdg_scores_request import SdgScoresRequest
from fds.sdk.FactSetESG.model.sdg_scores_response import SdgScoresResponse
class SDGApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
    # NOTE: this class is emitted by OpenAPI Generator — regenerate from the
    # spec instead of editing by hand (see the class docstring).
    if api_client is None:
        api_client = ApiClient()
    self.api_client = api_client
    # GET /factset-esg/v1/sdg-scores endpoint descriptor.
    self.get_sdg_scores_endpoint = _Endpoint(
        settings={
            'response_type': (
                { 200: (SdgScoresResponse,), 400: (ErrorResponse,), 401: (ErrorResponse,), 403: (ErrorResponse,), 415: (ErrorResponse,), 500: (ErrorResponse,), },
                None
            ),
            'auth': [
                'FactSetApiKey',
                'FactSetOAuth2'
            ],
            'endpoint_path': '/factset-esg/v1/sdg-scores',
            'operation_id': 'get_sdg_scores',
            'http_method': 'GET',
            'servers': None,
        },
        params_map={
            'all': [
                'ids',
                'start_date',
                'end_date',
                'score_types',
                'categories',
                'frequency',
            ],
            'required': [
                'ids',
                'start_date',
                'end_date',
            ],
            'nullable': [
            ],
            'enum': [
                'frequency',
            ],
            'validation': [
                'ids',
                'score_types',
            ]
        },
        root_map={
            # Server-side validation limits mirrored client-side.
            'validations': {
                ('ids',): {
                    'max_items': 1500,
                    'min_items': 1,
                },
                ('score_types',): {
                    'max_items': 8,
                    'min_items': 1,
                },
            },
            'allowed_values': {
                ('frequency',): {
                    "D": "D",
                    "W": "W",
                    "M": "M",
                    "CY": "CY",
                    "EMPTY": ""
                },
            },
            'openapi_types': {
                'ids':
                    ([str],),
                'start_date':
                    (str,),
                'end_date':
                    (str,),
                'score_types':
                    ([str],),
                'categories':
                    ([str],),
                'frequency':
                    (str,),
            },
            # Python snake_case -> API camelCase parameter names.
            'attribute_map': {
                'ids': 'ids',
                'start_date': 'startDate',
                'end_date': 'endDate',
                'score_types': 'scoreTypes',
                'categories': 'categories',
                'frequency': 'frequency',
            },
            'location_map': {
                'ids': 'query',
                'start_date': 'query',
                'end_date': 'query',
                'score_types': 'query',
                'categories': 'query',
                'frequency': 'query',
            },
            'collection_format_map': {
                'ids': 'csv',
                'score_types': 'csv',
                'categories': 'csv',
            }
        },
        headers_map={
            'accept': [
                'application/json'
            ],
            'content_type': [],
        },
        api_client=api_client
    )
    # POST variant of the same endpoint; parameters travel in the JSON body.
    self.get_sdg_scores_post_endpoint = _Endpoint(
        settings={
            'response_type': (
                { 200: (SdgScoresResponse,), 400: (ErrorResponse,), 401: (ErrorResponse,), 403: (ErrorResponse,), 415: (ErrorResponse,), 500: (ErrorResponse,), },
                None
            ),
            'auth': [
                'FactSetApiKey',
                'FactSetOAuth2'
            ],
            'endpoint_path': '/factset-esg/v1/sdg-scores',
            'operation_id': 'get_sdg_scores_post',
            'http_method': 'POST',
            'servers': None,
        },
        params_map={
            'all': [
                'sdg_scores_request',
            ],
            'required': [
                'sdg_scores_request',
            ],
            'nullable': [
            ],
            'enum': [
            ],
            'validation': [
            ]
        },
        root_map={
            'validations': {
            },
            'allowed_values': {
            },
            'openapi_types': {
                'sdg_scores_request':
                    (SdgScoresRequest,),
            },
            'attribute_map': {
            },
            'location_map': {
                'sdg_scores_request': 'body',
            },
            'collection_format_map': {
            }
        },
        headers_map={
            'accept': [
                'application/json'
            ],
            'content_type': [
                'application/json'
            ]
        },
        api_client=api_client
    )
@staticmethod
def apply_kwargs_defaults(kwargs, return_http_data_only, async_req):
    """Populate *kwargs* in place with the client-control defaults.

    Sets the async/return-mode flags from the arguments and fills each
    underscore-prefixed option with its existing value if the caller
    supplied one, else its default. Mutates *kwargs*; returns None.
    """
    kwargs["async_req"] = async_req
    kwargs["_return_http_data_only"] = return_http_data_only
    kwargs["_preload_content"] = kwargs.get("_preload_content", True)
    kwargs["_request_timeout"] = kwargs.get("_request_timeout", None)
    kwargs["_check_input_type"] = kwargs.get("_check_input_type", True)
    kwargs["_check_return_type"] = kwargs.get("_check_return_type", True)
    kwargs["_spec_property_naming"] = kwargs.get("_spec_property_naming", False)
    # These two default to None when absent (dict.get with no default).
    kwargs["_content_type"] = kwargs.get("_content_type")
    kwargs["_host_index"] = kwargs.get("_host_index")
def get_sdg_scores(
    self,
    ids,
    start_date,
    end_date,
    **kwargs
) -> SdgScoresResponse:
    """Get SDG scores for the requested identifiers over a date range.

    Returns short-term, long-term, and momentum scores generated for the 16
    Sustainable Development Goals categories defined by the United Nations
    (Truvalue Labs SDG Scores).

    This method makes a synchronous HTTP request and returns the HTTP data only.

    Args:
        ids ([str]): Security or entity identifiers (ISIN, Truvalue
            Identifiers, instrumentTvlId, instrumentPermId, tickers).
            Limited to 1500 ids per request; GET request lines are capped at
            8192 bytes, so large requests should use the POST variant.
        start_date (str): Start of the date range, ``YYYY-MM-DD``. Must be
            before ``end_date``; future dates (T+1) are not accepted.
        end_date (str): End of the date range, ``YYYY-MM-DD``. Must be after
            ``start_date``; future dates (T+1) are not accepted.

    Keyword Args:
        score_types ([str]): Truvalue score types to return (e.g. PULSE,
            INSIGHT, MOMENTUM, DYNAMIC_MAT, ADJ_INSIGHT, IND_PCTL,
            TTM_VOLUME, SDG_RANK). [optional] Server default: ["ALL"].
        categories ([str]): SDG categories to return (IMPACT,
            IMPACTARTICLES, GOAL1NOPOVERTY ... GOAL16PEACEJUSTICEANDSTRONGINSTITUTIONS).
            [optional] Server default: ["IMPACT"].
        frequency (str): Display frequency — D (daily), W (weekly),
            M (monthly), CY (yearly). [optional] Server default: "M".
        _preload_content (bool): If False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default True.
        _request_timeout (int/float/tuple): Total timeout, or a
            (connection, read) pair. Default None.
        _check_input_type (bool): Type-check data sent to the server.
            Default True.
        _check_return_type (bool): Type-check data received from the server.
            Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False for pythonic snake_case names
            (default).
        _content_type (str/None): Force body content-type. Default None
            (predicted from allowed content-types and body).
        _host_index (int/None): Index of the server to use. Default read
            from the configuration.

    Returns:
        SdgScoresResponse
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
    kwargs.update(
        ids=ids,
        start_date=start_date,
        end_date=end_date,
    )
    return self.get_sdg_scores_endpoint.call_with_http_info(**kwargs)
def get_sdg_scores_with_http_info(
self,
ids,
start_date,
end_date,
**kwargs
) -> typing.Tuple[SdgScoresResponse, int, typing.MutableMapping]:
"""Gets short-term, long-term, and momentum scores based on the 16 Sustainable Development Goals categories defined by the United Nations. # noqa: E501
Truvalue Labs SDG Scores provides short-term, long-term, and momentum scores that are generated for 16 Sustainable Development Goals categories defined by the United Nations. # noqa: E501
This method makes a synchronous HTTP request. Returns http data, http status and headers
Args:
ids ([str]): Security or Entity identifiers. ISIN, Truvalue Identifiers, instrumentTvlId, instrumentPermId, and Tickers are accepted inputs. <p>***ids limit** = 1500 per request*</p> *<p>Make note, GET Method URL request lines are also limited to a total length of 8192 bytes (8KB). In cases where the service allows for thousands of ids, which may lead to exceeding this request line limit of 8KB, its advised for any requests with large request lines to be requested through the respective \"POST\" method.</p>*
start_date (str): TThe start date requested for a given date range in YYYY-MM-DD format. The input start date must be before the input end date. Future dates (T+1) are not accepted in this endpoint.
end_date (str): The end date requested for a given date range in YYYY-MM-DD format. The input end date must be after the input start date. Future dates (T+1) are not accepted in this endpoint.
Keyword Args:
score_types ([str]): The FactSet Truvalue Score types being requested. Score Types include the Pulse, Insight, Momentum, Article Volume, Category Volume, Dynamic Materiality Score, Adjusted Insight, and Industry Percentile. When left blank all score types will be returned for the requested Id. To specify select scores returned in the response, provide a comma-separated list of the scores using the description below. <p>These scores use the same underlying data and 100-point scale, except for ranks and percentiles. The cognitive computing system behind FactSet's Truvalue Platform uses natural language processing to interpret semantic content and generate analytics. It does so by applying criteria that are consistent with established sustainability and ESG frameworks, scoring data points on performance using a 0 to 100 scale. A score of 50 represents a neutral impact. Scores above 50 indicate positive performance, and scores below reflect negative performance.</p> ### SDG Score Types |**SDG Category Input**|**Description**| |---|---| |**PULSE**|*Pulse Score*, is a measure of near-term performance changes that highlights opportunities and controversies, enabling real-time monitoring of companies. It focuses on events of the day and provides a responsive signal to alert investors to dynamic moves.| |**INSIGHT**|*Insight Score*, a measure of a company's longer-term SDG track record, similar to a ratings system. Scores are less sensitive to daily events and reflect the enduring performance record of a company over time. Scores are derived using an exponentially-weighted moving average of the Pulse, defined below, and the half-life of an event's influence on the overall score is 6 months.| |**MOMENTUM**|*Momentum Score*, measures the trend of a company's Insight score. It is a unique ESG metric in the industry that gives investors a high-precision view of the trajectory of a company's SDG performance, relative to peers. 
It does so by precisely articulating upward or downward movement, relative to that of all others in the universe, making it a measure that enhances quantitative workflows.| |**DYNAMIC_MAT**|*Dynamic Materiality Score*, shows the percentage value of data flow by category compared with the total data flow for the organization | |**ADJ_INSIGHT**|*Adjusted Insight*, Measures company SDG performance, generating scores for lower-volume and zero-volume firms by blending company scores with industry medians. **(ONLY Vaild for `IMPACT` category)**. <p>**Adjusted Insight Score = (# of articles / 5) x Company Insight Score + ((5 - # of articles) / 5) x Industry (or Sector) Median Insight Score.</p>| |**IND_PCTL**|*Industry Percentile*, offers context on company Adjusted Insight scores relative to peers in the same SICS Industry. This value is used to establish the textual ESG Rank. **(ONLY Vaild for `IMPACT` category).**| |**TTM_VOLUME**|*TTM Volume*, measures the data flow over a trailing twelve month period.| |**SDG_RANK**|*SDG Rank*, offers rank leveraging the 16 ESG categories defined by Sustainable Development Goals (SDG).The datatype for the data returned when this score types is requested is String**(ONLY Vaild for `IMPACT` category)**.| . [optional] if omitted the server will use the default value of ["ALL"]
categories ([str]): The SDG Categories specified for the Truvalue Scores being requested. To specify select categories returned in the response, provide a comma-separated list of the scores using the description below. |**SDG Category Input**|**Description**| |---|---| |**IMPACT**|**Impact** - The aggregate SDG score for each company is simply named \"Impact.\" The SDG Impact Score is produced using a weighted average of individual category scores, where the weight utilized is the category score volume.| |**IMPACTARTICLES**|**Impact Articles** - The All Goals Category Volume measures the total number of times any of the 16 goals received a score over a trailing twelve-month (TTM) period of time. **( Data wil be returned only for `TTM_VOLUME` score type)**| |**GOAL1NOPOVERTY**|**No Poverty** - Goal 1 focuses on poverty in all its manifestations and also aims to ensure social protection for the poor and vulnerable, increase access to basic services and support people harmed by climate related extreme events and other economic, social and environmental shocks and disasters. <p>**Company-Level Issue Examples** *- Financial services access and affordability, Underserved groups,Unethical pricing.*| |**GOAL2ZEROHUNGER**|**Zero Hunger** - Goal 2 aims to end hunger and all forms of malnutrition and commits to universal access to safe, nutritious and sufficient food at all times of the year, particularly for the poor and people in vulnerable situations (e.g., infants). This will require sustainable food production systems and resilient agricultural practices, equal access to land, technology, and markets and international cooperation on investments in infrastructure and technology to boost agricultural productivity. 
<p>**Company-Level Issue Examples** *- Sustainable agricultural practices, Agricultural ingredients sourcing and certifications, Food safety concerns, Animal welfare.*| |**GOAL3GOODHEALTHANDWELLBEING**|**Good Health and Wellbeing** - Goal 3 seeks to ensure health and wellbeing for all, at every stage of life and addresses all major health priorities, including reproductive, maternal, and child health; communicable, noncommunicable and environmental diseases; universal health coverage; and access for all to safe, effective, quality, and affordable medicines and vaccines.<p> **Company-Level Issue Examples** *- Harmful Chemicals in Products, Product Recalls, Healthcare Access and Affordability.*| |**GOAL4QUALITYEDUCATION**|**Quality Education** - Goal 4 addresses access and affordability of education and skills development starting from childhood development and continuing through adulthood, including for girls, persons with disabilities, indigenous peoples and children in vulnerable situations, Improvements to the access to education it hopes to ensure that all youth and a substantial proportion of adults achieve literacy and numeracy. It also seeks to build and upgrade education facilities and to increase the supply of qualified teachers.<p>**Company-Level Issue Examples** *- Mentorship and training, Education company quality, Education company ethics.*| |**GOAL5GENDEREQUALITY**|**Gender Equality** - Goal 5 emphasizes eliminating discrimination and violence against women and girls. The Goal emphasizes ensuring women's full and effective participation and equal opportunities for leadership at all levels of decision-making in political, economic and public life. 
Access to sexual and reproductive health and reproductive rights and access to economic resources (e.g., land ownership, financial services) are also emphasized.<p>**Company-Level Issue Examples** *- Board Diversity, Gender Discrimination, Sexual Harassment.*| |**GOAL6CLEANWATERANDSANITATION**|**Clean Water and Sanitation** - Goal 6 not only addresses issues relating to drinking water, sanitation and hygiene, but also the quality and sustainability of water resources worldwide. It strives to achieve universal and equitable access to safe and affordable drinking water for all. It also focuses on adequate and equitable sanitation and hygiene and reducing pollution, minimizing release of hazardous chemicals and materials, and protection of water-related ecosystems. It also highlights increasing water-use efficiency across all sectors, recycling, and ensuring sustainable withdrawals and supply of freshwater.<p>**Company-Level Issue Examples** *- Water Pollution, Water Recycling and Stewardship, Water Infrastructure.*| |**GOAL7AFFORDABLEANDCLEANENERGY**|**Goal 7 Affordable and Clean Energy** - Goal 7 seeks to ensure access to affordable, reliable, and modern energy services for all. It aims to increase renewable energy in the global energy mix and improve energy efficiency significantly. It also calls for more access to clean energy research, technology, and infrastructure for renewable energy, energy efficiency, and advanced and cleaner fossil-fuel technology, and promoting investment in energy infrastructure and clean energy technology.<p>**Company-Level Issue Examples** *- Green Buildings, Renewable Energy, Unethical Utility Pricing.*| |**GOAL8DECENTWORKANDECONOMICGROWTH**|**Decent Work and Economic Growth** - Goal 8 focuses on economic productivity and supports policies for entrepreneurship, creativity and innovation that assist micro, small, and medium-sized enterprises. 
The Goal also seeks to reduce unemployment, the proportion of youth not working or in school, child labor, and forced labor. Also covered are the protection of labor rights, migrant workers, safe and secure working environments, sustainable tourism, and increasing the capacity of domestic financial institutions in regards to access to banking, insurance, and financial services.<p>**Company-Level Issue Examples** *- Job Creation, Labor Exploitation, Employee Health and Safety, Workplace Turnover, Supplier Transparency.*| |**GOAL9INDUSTRYINNOVATIONANDINFRASTRUCTURE**|**Industry Innovation and Infrastructure** - Goal 9 focuses on three important aspects of sustainable development, infrastructure, industrialization and innovation, including considerations for resiliency, equity, quality, reliability, access and affordability, and regional and transborder infrastructure. The Goal focuses on infrastructure upgrades and retrofitting of industries with increased resource-use efficiency and clean and environmentally sound technologies and industrial processes.<p>**Company-Level Issue Examples** *- Digital Divide, ESG integration in financial services, Engineering Structural Integrity.*| |**GOAL10REDUCEDINEQUALITIES**|**Reduced Inequalities** - Goal 10 calls for reducing inequalities in income as well as those based on age, sex, disability, race, ethnicity, origin, religion, or economic or other status within a country. The Goal addresses inequalities among countries, including those related to representation, migration, and development assistance. It aims to empower and promote social, economic, and political inclusion of all. 
The Goal stresses regulation and monitoring of global financial markets and institutions.<p>**Company-Level Issue Examples** *- Responsible Lending, Worker Discrimination, CEO Pay Gap, Worker Pay Gap, Workplace Diversity and Inclusion.*| |**GOAL11SUSTAINABLECITIESANDCOMMUNITIES**|**Sustainable Cities and Communities** - Goal 11 seeks to ensure access for all to adequate, safe, and affordable housing and basic services, and green and public spaces, and to upgrade slums. It focuses on improving transportation, air quality and municipal and other waste management, and creating inclusive and sustainable urbanization through participatory urban planning. The Goal also supports safeguarding the world's cultural and natural heritage, while aiming to increase the number of cities and human settlements adopting and implementing integrated policies and plans towards inclusion, resource efficiency, mitigation and adaptation to climate change, and resilience to disasters.<p>**Company-Level Issue Examples** *- Air Pollution, Environmental Justice, Human Rights Violations, Affordable Housing.*| |**GOAL12RESPONSIBLECONSUMPTIONANDPRODUCTION**|**Responsible Consumption and Production** - Goal 12 aims to achieve the sustainable management and efficient use of natural resources through both the public and private sector. It specifically addresses global food waste in consumption, production, and distribution, sustainable tourism, waste and chemicals management. 
Goal 12 encourages sustainability reporting in the private sector, while in the public sector it encourages restructuring taxation and subsidies for fossil fuels and promoting sustainable public procurement practices.<p>**Company-Level Issue Examples** *- Sustainability Reporting, Circular Economy, Hazardous Waste Management, Waste Reduction.*| |**GOAL13CLIMATEACTION**|**Climate Action** - While Goal 13 is focused on actions by countries towards climate mitigation and adaptation, the private sector can also play a role in these areas. The goal seeks to strengthen resilience and adaptive capacity to climate-related hazards and natural disasters in all countries. It calls for integrating climate change measures, including those related to climate resilience and low GHG development, into national policies, strategies, and planning. It aims to improve education and awareness of climate change mitigation, adaptation, impact reduction, and early warning.<p>**Company-Level Issue Examples** *- GHG Emissions, Sustainable Transportation, Physical Climate Impacts.*| |**GOAL14LIFEBELOWWATER**|**Life Below Water** - Goal 14 focuses on preventing marine pollution of all kinds, particularly from land-based activities, and to minimize and address the impacts of ocean acidification. The Goal also aims to achieve sustainable yields in fisheries, through regulation of harvesting, controlling subsidies, and ending overfishing. 
It seeks to sustainably manage and protect marine and coastal ecosystems to avoid significant adverse impacts, including by strengthening their resilience, and take action for their restoration in order to achieve healthy and productive oceans.<p>**Company-Level Issue Examples** *- Impacts on water-related endangered species and habitats, Oil Spills, Seafood Sourcing.*| |**GOAL15LIFEONLAND**|**Life On Land** - Goal 15 seeks to ensure the conservation, restoration, and sustainable use of terrestrial and inland freshwater ecosystems and their services, in order to preserve biodiversity. It focuses specifically on sustainably managing forests, halting deforestation, restoring degraded lands and successfully combating desertification, reducing degraded natural habitats and ending biodiversity loss, with an emphasis on threatened species and invasive alien species.<p>**Company-Level Issue Examples** *- Impacts on land-related endangered species and habitats, Sustainable forestry practices and certifications, Project lifecycle environmental impacts.*| |**GOAL16PEACEJUSTICEANDSTRONGINSTITUTIONS**|**Peace, Justice, and Strong Institutions** - Goal 16 aims to significantly reduce all forms of violence, and also focuses specifically on reducing violence against children in the forms of abuse, exploitation, trafficking, and torture. It also aims to significantly reduce illicit financial and arms flows and to substantially reduce corruption and bribery in all their forms. The Goal also emphasizes effective and transparent institutions at all levels, inclusive and participatory decision-making, ensuring public access to information, and protection of fundamental freedoms.<p>**Company-Level Issue Examples** *- Tax Avoidance, Anti-Competitive Behavior, Cyber Security, Corruption, ESG Resolutions.*| . [optional] if omitted the server will use the default value of ["IMPACT"]
frequency (str): Controls the display frequency of the data returned. * **D** = Daily data (to receive most recent day's values, you must use daily frequency). * **W** = Weekly, End of week as of UTC Sunday at 12 AM. * **M** = Monthly, End of month as of UTC 12 AM. * **CY** = Yearly, End of year as of UTC 12 AM. . [optional] if omitted the server will use the default value of "M"
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
SdgScoresResponse
Response Object
int
Http Status Code
dict
Dictionary of the response headers
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
kwargs['ids'] = \
ids
kwargs['start_date'] = \
start_date
kwargs['end_date'] = \
end_date
return self.get_sdg_scores_endpoint.call_with_http_info(**kwargs)
def get_sdg_scores_async(
    self,
    ids,
    start_date,
    end_date,
    **kwargs
) -> "ApplyResult[SdgScoresResponse]":
    """Fetch SDG scores asynchronously, returning data wrapped in an ApplyResult.

    Retrieves short-term, long-term, and momentum scores generated for the 16
    Sustainable Development Goals categories defined by the United Nations.
    The HTTP request is issued asynchronously; only the HTTP data (no status
    code or headers) is returned, wrapped in an ApplyResult.

    Args:
        ids ([str]): Security or entity identifiers. ISIN, Truvalue
            identifiers, instrumentTvlId, instrumentPermId, and tickers are
            accepted. Limited to 1500 ids per request; because GET request
            lines are capped at 8192 bytes, large id lists should be sent
            through the corresponding POST method instead.
        start_date (str): Start of the requested date range in YYYY-MM-DD
            format. Must be before end_date; future dates (T+1) are not
            accepted by this endpoint.
        end_date (str): End of the requested date range in YYYY-MM-DD
            format. Must be after start_date; future dates (T+1) are not
            accepted by this endpoint.

    Keyword Args:
        score_types ([str]): Score types to return: PULSE, INSIGHT,
            MOMENTUM, DYNAMIC_MAT, ADJ_INSIGHT, IND_PCTL, TTM_VOLUME, and
            SDG_RANK (the last three only valid for the IMPACT category).
            [optional] Defaults to ["ALL"] when omitted.
        categories ([str]): SDG categories to return: IMPACT (the
            aggregate, volume-weighted score), IMPACTARTICLES, and
            GOAL1NOPOVERTY through GOAL16PEACEJUSTICEANDSTRONGINSTITUTIONS.
            [optional] Defaults to ["IMPACT"] when omitted.
        frequency (str): Display frequency of the returned data: D (daily;
            required for the most recent day's values), W (weekly, end of
            week UTC Sunday 12 AM), M (monthly, end of month UTC 12 AM),
            or CY (yearly, end of year UTC 12 AM). [optional] Defaults to
            "M" when omitted.
        _preload_content (bool): If False, the urllib3.HTTPResponse object
            is returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): Timeout for this request. A
            single number is the total request timeout; a (connection,
            read) tuple sets each separately. Default is None.
        _check_input_type (bool): Whether to type-check data sent to the
            server. Default is True.
        _check_return_type (bool): Whether to type-check data received
            from the server. Default is True.
        _spec_property_naming (bool): True if the variable names in the
            input data are serialized names as specified in the OpenAPI
            document; False if they are pythonic (snake case) names
            (default).
        _content_type (str/None): Force the body content-type. Default is
            None, in which case it is predicted from the allowed
            content-types and the body.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.

    Returns:
        ApplyResult[SdgScoresResponse]
    """
    # Async request: deliver only the deserialized HTTP data via ApplyResult.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
    # Fold the required positional arguments into kwargs for the endpoint call.
    kwargs.update(ids=ids, start_date=start_date, end_date=end_date)
    return self.get_sdg_scores_endpoint.call_with_http_info(**kwargs)
def get_sdg_scores_with_http_info_async(
    self,
    ids,
    start_date,
    end_date,
    **kwargs
) -> "ApplyResult[typing.Tuple[SdgScoresResponse, int, typing.MutableMapping]]":
    """Asynchronously fetch SDG scores, returning data, HTTP status, and headers.

    Retrieves Truvalue Labs short-term, long-term, and momentum scores for the
    16 Sustainable Development Goals categories defined by the United Nations.
    The HTTP request is made asynchronously; the result is an ApplyResult
    wrapping a tuple of (response body, HTTP status code, response headers).

    Args:
        ids ([str]): Security or entity identifiers (ISIN, Truvalue
            identifiers, instrumentTvlId, instrumentPermId, or tickers).
            Limited to 1500 ids per request; GET request lines are capped at
            8KB, so very large id lists should use the POST variant.
        start_date (str): Start of the requested date range in YYYY-MM-DD
            format. Must be before end_date; future dates are not accepted.
        end_date (str): End of the requested date range in YYYY-MM-DD format.
            Must be after start_date; future dates are not accepted.

    Keyword Args:
        score_types ([str]): Score types to return (PULSE, INSIGHT, MOMENTUM,
            DYNAMIC_MAT, ADJ_INSIGHT, IND_PCTL, TTM_VOLUME, SDG_RANK).
            When omitted, the server defaults to ["ALL"]. Note that
            ADJ_INSIGHT, IND_PCTL, and SDG_RANK are only valid for the
            IMPACT category.
        categories ([str]): SDG categories to return (IMPACT, IMPACTARTICLES,
            and GOAL1NOPOVERTY through GOAL16PEACEJUSTICEANDSTRONGINSTITUTIONS).
            When omitted, the server defaults to ["IMPACT"].
        frequency (str): Display frequency of the data: D (daily), W (weekly),
            M (monthly), or CY (yearly). When omitted, the server defaults
            to "M".
        _preload_content (bool): If False, return the urllib3.HTTPResponse
            object without reading/decoding response data. Default True.
        _request_timeout (int/float/tuple): Request timeout; a single number
            is the total timeout, a tuple is (connection, read). Default None.
        _check_input_type (bool): Type-check data sent to the server.
            Default True.
        _check_return_type (bool): Type-check data received from the server.
            Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False for pythonic snake_case names
            (default).
        _content_type (str/None): Force the body content-type; when None it
            is predicted from the allowed content-types and body.
        _host_index (int/None): Index of the server to use; read from the
            configuration by default.

    Returns:
        ApplyResult[(SdgScoresResponse, int, typing.Dict)]
    """
    # Full-info async call: keep status and headers alongside the body.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
    kwargs.update(ids=ids, start_date=start_date, end_date=end_date)
    return self.get_sdg_scores_endpoint.call_with_http_info(**kwargs)
def get_sdg_scores_post(
    self,
    sdg_scores_request,
    **kwargs
) -> SdgScoresResponse:
    """Synchronously fetch SDG scores via POST, returning only the HTTP data.

    Retrieves Truvalue Labs short-term, long-term, and momentum scores for the
    16 Sustainable Development Goals categories defined by the United Nations.
    The POST form accepts the request parameters in the body, avoiding the
    8KB GET request-line limit for large id lists.

    Args:
        sdg_scores_request (SdgScoresRequest): The SDG Scores request body,
            specifying the list of ids, score types, SDG categories, date
            range, and frequency.

    Keyword Args:
        _preload_content (bool): If False, return the urllib3.HTTPResponse
            object without reading/decoding response data. Default True.
        _request_timeout (int/float/tuple): Request timeout; a single number
            is the total timeout, a tuple is (connection, read). Default None.
        _check_input_type (bool): Type-check data sent to the server.
            Default True.
        _check_return_type (bool): Type-check data received from the server.
            Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False for pythonic snake_case names
            (default).
        _content_type (str/None): Force the body content-type; when None it
            is predicted from the allowed content-types and body.
        _host_index (int/None): Index of the server to use; read from the
            configuration by default.

    Returns:
        SdgScoresResponse: The deserialized response body.
    """
    # Synchronous call returning only the deserialized body.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
    kwargs.update(sdg_scores_request=sdg_scores_request)
    return self.get_sdg_scores_post_endpoint.call_with_http_info(**kwargs)
def get_sdg_scores_post_with_http_info(
    self,
    sdg_scores_request,
    **kwargs
) -> typing.Tuple[SdgScoresResponse, int, typing.MutableMapping]:
    """Synchronously fetch SDG scores via POST, returning data, status, and headers.

    Retrieves Truvalue Labs short-term, long-term, and momentum scores for the
    16 Sustainable Development Goals categories defined by the United Nations.
    Unlike get_sdg_scores_post, this variant also surfaces the HTTP status
    code and response headers.

    Args:
        sdg_scores_request (SdgScoresRequest): The SDG Scores request body,
            specifying the list of ids, score types, SDG categories, date
            range, and frequency.

    Keyword Args:
        _preload_content (bool): If False, return the urllib3.HTTPResponse
            object without reading/decoding response data. Default True.
        _request_timeout (int/float/tuple): Request timeout; a single number
            is the total timeout, a tuple is (connection, read). Default None.
        _check_input_type (bool): Type-check data sent to the server.
            Default True.
        _check_return_type (bool): Type-check data received from the server.
            Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False for pythonic snake_case names
            (default).
        _content_type (str/None): Force the body content-type; when None it
            is predicted from the allowed content-types and body.
        _host_index (int/None): Index of the server to use; read from the
            configuration by default.

    Returns:
        tuple: (SdgScoresResponse response object, int HTTP status code,
        dict of response headers).
    """
    # Synchronous full-info call: body plus status code and headers.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
    kwargs.update(sdg_scores_request=sdg_scores_request)
    return self.get_sdg_scores_post_endpoint.call_with_http_info(**kwargs)
def get_sdg_scores_post_async(
    self,
    sdg_scores_request,
    **kwargs
) -> "ApplyResult[SdgScoresResponse]":
    """Asynchronously fetch SDG scores via POST, returning only the HTTP data.

    Retrieves Truvalue Labs short-term, long-term, and momentum scores for the
    16 Sustainable Development Goals categories defined by the United Nations.
    The HTTP request is made asynchronously; the deserialized body is wrapped
    in an ApplyResult.

    Args:
        sdg_scores_request (SdgScoresRequest): The SDG Scores request body,
            specifying the list of ids, score types, SDG categories, date
            range, and frequency.

    Keyword Args:
        _preload_content (bool): If False, return the urllib3.HTTPResponse
            object without reading/decoding response data. Default True.
        _request_timeout (int/float/tuple): Request timeout; a single number
            is the total timeout, a tuple is (connection, read). Default None.
        _check_input_type (bool): Type-check data sent to the server.
            Default True.
        _check_return_type (bool): Type-check data received from the server.
            Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False for pythonic snake_case names
            (default).
        _content_type (str/None): Force the body content-type; when None it
            is predicted from the allowed content-types and body.
        _host_index (int/None): Index of the server to use; read from the
            configuration by default.

    Returns:
        ApplyResult[SdgScoresResponse]
    """
    # Async call returning only the deserialized body, wrapped in ApplyResult.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
    kwargs.update(sdg_scores_request=sdg_scores_request)
    return self.get_sdg_scores_post_endpoint.call_with_http_info(**kwargs)
def get_sdg_scores_post_with_http_info_async(
    self,
    sdg_scores_request,
    **kwargs
) -> "ApplyResult[typing.Tuple[SdgScoresResponse, int, typing.MutableMapping]]":
    """Asynchronously fetch SDG scores via POST, returning data, status, and headers.

    Retrieves Truvalue Labs short-term, long-term, and momentum scores for the
    16 Sustainable Development Goals categories defined by the United Nations.
    The HTTP request is made asynchronously; the result is an ApplyResult
    wrapping a tuple of (response body, HTTP status code, response headers).

    Args:
        sdg_scores_request (SdgScoresRequest): The SDG Scores request body,
            specifying the list of ids, score types, SDG categories, date
            range, and frequency.

    Keyword Args:
        _preload_content (bool): If False, return the urllib3.HTTPResponse
            object without reading/decoding response data. Default True.
        _request_timeout (int/float/tuple): Request timeout; a single number
            is the total timeout, a tuple is (connection, read). Default None.
        _check_input_type (bool): Type-check data sent to the server.
            Default True.
        _check_return_type (bool): Type-check data received from the server.
            Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False for pythonic snake_case names
            (default).
        _content_type (str/None): Force the body content-type; when None it
            is predicted from the allowed content-types and body.
        _host_index (int/None): Index of the server to use; read from the
            configuration by default.

    Returns:
        ApplyResult[(SdgScoresResponse, int, typing.Dict)]
    """
    # Async full-info call: body plus status and headers, wrapped in ApplyResult.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
    kwargs.update(sdg_scores_request=sdg_scores_request)
    return self.get_sdg_scores_post_endpoint.call_with_http_info(**kwargs)
| 147.448276
| 11,783
| 0.734416
| 12,228
| 94,072
| 5.608031
| 0.082352
| 0.007466
| 0.012133
| 0.016799
| 0.962727
| 0.95949
| 0.959431
| 0.955815
| 0.954823
| 0.954007
| 0
| 0.006695
| 0.20294
| 94,072
| 637
| 11,784
| 147.679749
| 0.90787
| 0.865146
| 0
| 0.608997
| 0
| 0
| 0.165102
| 0.033872
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034602
| false
| 0
| 0.034602
| 0
| 0.100346
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
2eff50e137dfe7915f494fa5cbe1c7a2048832bc
| 23,739
|
py
|
Python
|
post_optimization_studies/mad_analyses/ma100MeV_L1pt8-2pt4TeV_binsize/Output/Histos/MadAnalysis5job_0/selection_12.py
|
sheride/axion_pheno
|
7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5
|
[
"MIT"
] | null | null | null |
post_optimization_studies/mad_analyses/ma100MeV_L1pt8-2pt4TeV_binsize/Output/Histos/MadAnalysis5job_0/selection_12.py
|
sheride/axion_pheno
|
7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5
|
[
"MIT"
] | null | null | null |
post_optimization_studies/mad_analyses/ma100MeV_L1pt8-2pt4TeV_binsize/Output/Histos/MadAnalysis5job_0/selection_12.py
|
sheride/axion_pheno
|
7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5
|
[
"MIT"
] | null | null | null |
def selection_12():
    """Draw the stacked THT (total hadronic transverse energy) histogram for
    selection 12 of this MadAnalysis5 job and save it as PNG/EPS images.

    Reworked from the auto-generated MadAnalysis5 script:
    * the ``normed=False`` keyword was removed from every ``hist`` call — it
      was deprecated in matplotlib 2.1 and removed in 3.1, so the generated
      code raises ``TypeError`` on any modern matplotlib; ``False`` was the
      default, so dropping it preserves behavior on all versions.
    * legend labels are raw strings (the generated ``"$bg\\_..."`` literals
      relied on the invalid ``\\_`` escape sequence).
    * the 20 near-identical ``hist`` calls are driven by a data table.
    """
    # Library import (kept function-local, as in the generated code).
    import numpy
    import matplotlib
    import matplotlib.pyplot as plt
    import matplotlib.gridspec as gridspec

    # Library version (unused, kept for provenance with the generator output).
    matplotlib_version = matplotlib.__version__
    numpy_version = numpy.__version__

    # Histo binning: 80 bins of width 50 GeV over [0, 4000].
    xBinning = numpy.linspace(0.0,4000.0,81,endpoint=True)

    # Data sequence: middle of each bin.
    xData = numpy.array([25.0,75.0,125.0,175.0,225.0,275.0,325.0,375.0,425.0,475.0,525.0,575.0,625.0,675.0,725.0,775.0,825.0,875.0,925.0,975.0,1025.0,1075.0,1125.0,1175.0,1225.0,1275.0,1325.0,1375.0,1425.0,1475.0,1525.0,1575.0,1625.0,1675.0,1725.0,1775.0,1825.0,1875.0,1925.0,1975.0,2025.0,2075.0,2125.0,2175.0,2225.0,2275.0,2325.0,2375.0,2425.0,2475.0,2525.0,2575.0,2625.0,2675.0,2725.0,2775.0,2825.0,2875.0,2925.0,2975.0,3025.0,3075.0,3125.0,3175.0,3225.0,3275.0,3325.0,3375.0,3425.0,3475.0,3525.0,3575.0,3625.0,3675.0,3725.0,3775.0,3825.0,3875.0,3925.0,3975.0])

    def _sparse(entries):
        # Build an 80-bin weight array that is zero everywhere except at the
        # given (bin index, value) pairs. Used for the HT-sliced samples whose
        # generated arrays were almost entirely 0.0.
        w = numpy.zeros(80)
        for i, v in entries:
            w[i] = v
        return w

    # Weights for histo y13_THT_0 .. y13_THT_3 (dense arrays, kept verbatim
    # from the generated code).
    y13_THT_0_weights = numpy.array([11.277559974,190.789812642,279.550834567,339.653669098,361.412667119,357.167067505,333.284969678,290.297703589,257.393736582,207.507131121,176.991373897,146.210266698,112.510249764,96.8543611882,76.4220530472,65.2771740611,56.7858248336,43.2527660649,33.8326769219,29.985045272,23.4838628634,20.9629960928,16.3192935153,13.5330737688,10.0834670826,9.68543611882,8.35866223953,6.23582643267,5.83779546888,3.58228467409,2.65354385858,3.05157572237,1.99015781894,1.45944916722,1.45944916722,0.928740215504,1.19409469136,0.928740215504,1.06141760343,0.530708651716,0.265354385858,0.663386039645,0.265354385858,0.398031563787,0.265354385858,0.265354385858,0.265354385858,0.0,0.265354385858,0.265354385858,0.0,0.132677177929,0.0,0.0,0.0,0.0,0.0,0.132677177929,0.0,0.0,0.132677177929,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    y13_THT_1_weights = numpy.array([7.21301404942,128.069196502,177.033638879,219.749969894,236.823942289,237.622627156,221.683732123,182.480427471,166.538744654,127.42965508,115.966171099,93.2907031401,79.3405765937,63.3138214289,49.6904350514,43.9979318576,36.4633335392,28.290572702,22.5218191442,19.2330684325,16.1077207791,12.902753441,9.61691041711,7.45283234054,7.05328007346,5.60980195964,3.92774656782,3.52576023637,3.52701923519,3.60603040373,2.40492664838,2.48447199214,0.961932649579,0.641277641634,1.20203151757,0.880957143118,0.160298816785,0.881458644313,0.320737442391,0.320737442391,0.2405181146,0.240460710249,0.160298816785,0.240378335755,0.0,0.0802192978149,0.0,0.0,0.0,0.0,0.16015903794,0.160101603613,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.160101603613,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    y13_THT_2_weights = numpy.array([4.68762102666,86.6168289705,126.149097629,150.160142887,163.493795808,163.545905819,146.722532135,124.482387264,108.856973841,89.6377396321,75.5227965407,61.2515834151,51.5638312933,43.2823594795,35.2613177228,30.677886719,22.4484979166,19.1671631979,14.4274561598,12.656576772,10.7294453499,8.28146281377,6.97934552859,5.83348327763,4.89595907229,4.9480450837,2.29172600192,1.77087908785,2.50006464755,1.45837111941,1.19794766237,0.833354882518,0.677100748296,1.19794766237,0.364592779852,0.416677291259,0.104169352815,0.520846914074,0.312507968444,0.104169352815,0.312507968444,0.104169352815,0.104169352815,0.104169352815,0.156254044222,0.20833870563,0.0,0.0520846914074,0.0,0.104169352815,0.0,0.104169352815,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    y13_THT_3_weights = numpy.array([3.27167510628,62.0551109831,88.9752900859,106.720566086,116.926779537,111.983707866,106.044905858,88.6907999897,73.399284819,63.7976315724,54.0181682656,44.0253448866,35.4905520007,29.2672568964,23.043961792,19.4166725655,17.3896588801,12.4465842087,9.53052922263,8.28587080176,6.7567162847,5.54761987586,4.40964749107,3.27167510628,2.80937194995,2.70268711388,2.59600197781,1.45802839301,1.52915181706,1.17353529682,1.28022013289,0.995726736692,0.640110216445,0.568986792396,0.568986792396,0.497863368346,0.213370032148,0.248931714173,0.284493366198,0.106685016074,0.142246698099,0.142246698099,0.106685016074,0.0711233340495,0.0,0.0711233340495,0.0,0.0711233340495,0.0,0.0711233340495,0.0,0.0355616820247,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0355616820247,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])

    # Weights for histo y13_THT_4 .. y13_THT_10 (HT-sliced: nonzero only in a
    # narrow index window; values identical to the generated literals).
    y13_THT_4_weights = numpy.zeros(80)
    y13_THT_5_weights = _sparse([(2,158.005666306),(3,78.97186254)])
    y13_THT_6_weights = _sparse([(4,241.766740452),(5,293.736568848),(6,518.253813009),(7,380.080886344)])
    y13_THT_7_weights = _sparse([(8,328.103919777),(9,240.960653785),(10,195.242473477),(11,155.727705845)])
    y13_THT_8_weights = _sparse([(12,98.3081790494),(13,78.6313680519),(14,61.2334751532),(15,31.7719316661)])
    y13_THT_9_weights = _sparse([(16,31.8324034158),(17,26.3125760819),(18,14.6387133953),(19,12.3072685131),(20,8.6975619818),(21,7.4261766787),(22,4.24207354759),(23,4.88090138585)])
    y13_THT_10_weights = _sparse([(24,2.40054229988),(25,1.37210682802),(26,0.571944528488),(27,1.02613255191),(28,0.683844543261),(29,0.456381825026),(30,0.682764440122),(31,0.689033824995)])

    # Weights for histo y13_THT_11 (dense tail, kept verbatim).
    y13_THT_11_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.189543473619,0.121837123083,0.108294085898,0.0405525381686,0.0135727685641,0.0813239688136,0.0271356911534,0.0135002721612,0.0542407818257,0.013537753275,0.0,0.0271046835514,0.0,0.0,0.0,0.0135152259158,0.0,0.0,0.0135566280952,0.0135575231838,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])

    # Weights for histo y13_THT_12 .. y13_THT_18 (HT-sliced, as above).
    y13_THT_12_weights = _sparse([(1,3.6450972)])
    y13_THT_13_weights = _sparse([(2,25.6050978981),(3,61.7436365452)])
    y13_THT_14_weights = _sparse([(4,90.757652125),(5,122.109282343),(6,119.648122225),(7,120.85230061)])
    y13_THT_15_weights = _sparse([(8,111.982478312),(9,89.8533255115),(10,77.1216666061),(11,53.8082146899)])
    y13_THT_16_weights = _sparse([(12,45.2230525732),(13,33.4445367443),(14,25.5782339406),(15,18.7362626319)])
    y13_THT_17_weights = _sparse([(16,12.9500194074),(17,9.76786541051),(18,7.10707376695),(19,5.51939778414),(20,4.40133863128),(21,2.98527297971),(22,2.40634228821),(23,1.58777570395)])
    y13_THT_18_weights = _sparse([(24,1.18877661878),(25,0.888895564313),(26,0.661042565954),(27,0.48420836674),(28,0.323842734393),(29,0.23648257266),(30,0.215350384511),(31,0.118166572081)])

    # Weights for histo y13_THT_19 (dense tail, kept verbatim).
    y13_THT_19_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.108519619226,0.0660106511801,0.0638311278544,0.04473009689,0.0360407688582,0.025567548559,0.0234245636981,0.0147161590463,0.0168931321408,0.00211496461359,0.00851510381137,0.00211496461359,0.00426586298655,0.00425990836383,0.00425604515339,0.0063844836739,0.00212624019897,0.0,0.0,0.0,0.0,0.0,0.0,0.00209795490581,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])

    # All per-sample weight arrays, in generator order 0..19.
    all_weights = [y13_THT_0_weights, y13_THT_1_weights, y13_THT_2_weights,
                   y13_THT_3_weights, y13_THT_4_weights, y13_THT_5_weights,
                   y13_THT_6_weights, y13_THT_7_weights, y13_THT_8_weights,
                   y13_THT_9_weights, y13_THT_10_weights, y13_THT_11_weights,
                   y13_THT_12_weights, y13_THT_13_weights, y13_THT_14_weights,
                   y13_THT_15_weights, y13_THT_16_weights, y13_THT_17_weights,
                   y13_THT_18_weights, y13_THT_19_weights]

    # Stack layers in the exact order the generated code drew them (widest
    # cumulative stack first): (number of leading arrays summed, label, color).
    layers = [
        (20, r"$bg\_vbf\_1600\_inf$",  "#ccc6aa"),
        (19, r"$bg\_vbf\_1200\_1600$", "#c1bfa8"),
        (18, r"$bg\_vbf\_800\_1200$",  "#bab5a3"),
        (17, r"$bg\_vbf\_600\_800$",   "#b2a596"),
        (16, r"$bg\_vbf\_400\_600$",   "#b7a39b"),
        (15, r"$bg\_vbf\_200\_400$",   "#ad998c"),
        (14, r"$bg\_vbf\_100\_200$",   "#9b8e82"),
        (13, r"$bg\_vbf\_0\_100$",     "#876656"),
        (12, r"$bg\_dip\_1600\_inf$",  "#afcec6"),
        (11, r"$bg\_dip\_1200\_1600$", "#84c1a3"),
        (10, r"$bg\_dip\_800\_1200$",  "#89a8a0"),
        (9,  r"$bg\_dip\_600\_800$",   "#829e8c"),
        (8,  r"$bg\_dip\_400\_600$",   "#adbcc6"),
        (7,  r"$bg\_dip\_200\_400$",   "#7a8e99"),
        (6,  r"$bg\_dip\_100\_200$",   "#758991"),
        (5,  r"$bg\_dip\_0\_100$",     "#688296"),
        (4,  r"$signal\_2pt4TeVL$",    "#6d7a84"),
        (3,  r"$signal\_2pt2TeVL$",    "#7c99d1"),
        (2,  r"$signal\_2TeVL$",       "#7f7f9b"),
        (1,  r"$signal\_1pt8TeVL$",    "#aaa5bf"),
    ]

    # Creating a new Canvas.
    fig = plt.figure(figsize=(12,6),dpi=80)
    frame = gridspec.GridSpec(1,1,right=0.7)
    pad = fig.add_subplot(frame[0])

    # Creating a new Stack: each layer is the elementwise sum of the first
    # `count` weight arrays, drawn as an outline ("step") histogram.
    # NOTE: the generated code passed normed=False here; that keyword was
    # removed in matplotlib 3.1 and False was the default, so it is dropped.
    for count, label, color in layers:
        pad.hist(x=xData, bins=xBinning, weights=sum(all_weights[:count]),
                 label=label, histtype="step", rwidth=1.0,
                 color=None, edgecolor=color, linewidth=1, linestyle="solid",
                 bottom=None, cumulative=False, align="mid",
                 orientation="vertical")

    # Axis
    plt.rc('text',usetex=False)
    plt.xlabel(r"THT",\
     fontsize=16,color="black")
    plt.ylabel(r"$\mathrm{Events}$ $(\mathcal{L}_{\mathrm{int}} = 3000.0\ \mathrm{fb}^{-1})$ ",\
     fontsize=16,color="black")

    # Boundary of y-axis: 10% headroom above the tallest total-stack bin.
    ymax = sum(all_weights).max()*1.1
    ymin = 0  # linear scale
    plt.gca().set_ylim(ymin,ymax)

    # Log/Linear scale for X-axis and Y-axis (log variants were commented out
    # in the generated script).
    plt.gca().set_xscale("linear")
    plt.gca().set_yscale("linear")

    # Legend, placed outside the axes on the right.
    plt.legend(bbox_to_anchor=(1.05,1), loc=2, borderaxespad=0.)

    # Saving the image.
    # NOTE(review): the PDF directory receives a .png file — this matches the
    # generated output, but looks like a generator quirk; confirm upstream
    # before changing the extension.
    plt.savefig('../../HTML/MadAnalysis5job_0/selection_12.png')
    plt.savefig('../../PDF/MadAnalysis5job_0/selection_12.png')
    plt.savefig('../../DVI/MadAnalysis5job_0/selection_12.eps')

# Running!
if __name__ == '__main__':
    selection_12()
| 108.894495
| 891
| 0.707865
| 5,536
| 23,739
| 2.866149
| 0.107659
| 0.32596
| 0.47854
| 0.624693
| 0.673914
| 0.666604
| 0.666604
| 0.651289
| 0.641709
| 0.641142
| 0
| 0.359528
| 0.079068
| 23,739
| 217
| 892
| 109.396313
| 0.366252
| 0.06424
| 0
| 0.171875
| 0
| 0.007813
| 0.052446
| 0.009154
| 0
| 0
| 0
| 0
| 0
| 1
| 0.007813
| false
| 0
| 0.03125
| 0
| 0.039063
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2c1570c5575d7b28d41da8e656e242cf54e480b8
| 32,180
|
py
|
Python
|
dnacentersdk/api/v2_2_2_3/licenses.py
|
oboehmer/dnacentersdk
|
25c4e99900640deee91a56aa886874d9cb0ca960
|
[
"MIT"
] | 32
|
2019-09-05T05:16:56.000Z
|
2022-03-22T09:50:38.000Z
|
dnacentersdk/api/v2_2_2_3/licenses.py
|
oboehmer/dnacentersdk
|
25c4e99900640deee91a56aa886874d9cb0ca960
|
[
"MIT"
] | 35
|
2019-09-07T18:58:54.000Z
|
2022-03-24T19:29:36.000Z
|
dnacentersdk/api/v2_2_2_3/licenses.py
|
oboehmer/dnacentersdk
|
25c4e99900640deee91a56aa886874d9cb0ca960
|
[
"MIT"
] | 18
|
2019-09-09T11:07:21.000Z
|
2022-03-25T08:49:59.000Z
|
# -*- coding: utf-8 -*-
"""Cisco DNA Center Licenses API wrapper.
Copyright (c) 2019-2021 Cisco Systems.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
from builtins import *
from past.builtins import basestring
from ...restsession import RestSession
from ...utils import (
check_type,
dict_from_items_with_values,
apply_path_params,
dict_of_str,
)
class Licenses(object):
"""Cisco DNA Center Licenses API (version: 2.2.2.3).
Wraps the DNA Center Licenses
API and exposes the API as native Python
methods that return native Python objects.
"""
def __init__(self, session, object_factory, request_validator):
    """Initialize a new Licenses
    object with the provided RestSession.

    Args:
        session(RestSession): The RESTful session object to be used for
            API calls to the DNA Center service.
        object_factory(callable): Factory that wraps raw JSON responses
            into native objects; invoked as
            ``object_factory(model_id, json_data)`` by the API methods.
        request_validator(callable): Returns a validator for a given
            schema id; used as ``request_validator(schema_id).validate(payload)``
            to check request bodies before sending.

    Raises:
        TypeError: If the parameter types are incorrect.
    """
    # Fail fast if the caller passed something other than a RestSession.
    check_type(session, RestSession)

    super(Licenses, self).__init__()

    # Stored for use by the individual endpoint wrapper methods.
    self._session = session
    self._object_factory = object_factory
    self._request_validator = request_validator
def device_count_details(self,
                         device_type=None,
                         dna_level=None,
                         registration_status=None,
                         smart_account_id=None,
                         virtual_account_name=None,
                         headers=None,
                         **request_parameters):
    """Get total number of managed device(s). .

    Args:
        device_type(basestring): device_type query parameter. Type of device .
        registration_status(basestring): registration_status query parameter. Smart license registration status
            of device .
        dna_level(basestring): dna_level query parameter. Device Cisco DNA license level .
        virtual_account_name(basestring): virtual_account_name query parameter. Name of virtual account .
        smart_account_id(basestring): smart_account_id query parameter. Id of smart account .
        headers(dict): Dictionary of HTTP Headers to send with the Request
            .
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        MyDict: JSON response. Access the object's properties by using
        the dot notation or the bracket notation.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the DNA Center cloud returns an error.
    """
    # Validate argument types up front; all query params are strings here.
    check_type(headers, dict)
    for value in (device_type, registration_status, dna_level,
                  virtual_account_name, smart_account_id):
        check_type(value, basestring)
    if headers is not None and 'X-Auth-Token' in headers:
        check_type(headers.get('X-Auth-Token'),
                   basestring, may_be_none=False)

    # Assemble query parameters, dropping any whose value is None.
    query = {
        'device_type': device_type,
        'registration_status': registration_status,
        'dna_level': dna_level,
        'virtual_account_name': virtual_account_name,
        'smart_account_id': smart_account_id,
    }
    query.update(request_parameters)
    query = dict_from_items_with_values(query)

    # Merge caller-supplied headers over the session defaults; only then
    # do we pass an explicit headers= argument to the request.
    merged_headers = self._session.headers or {}
    use_custom_headers = False
    if headers:
        merged_headers.update(dict_of_str(headers))
        use_custom_headers = True

    # This endpoint has no path parameters.
    endpoint_full_url = apply_path_params(
        '/dna/intent/api/v1/licenses/device/count', {})

    if use_custom_headers:
        json_data = self._session.get(endpoint_full_url, params=query,
                                      headers=merged_headers)
    else:
        json_data = self._session.get(endpoint_full_url, params=query)

    # Wrap the raw JSON in the generated model object.
    return self._object_factory('bpm_c0cf04bdc758b29bb11abbdacbd921_v2_2_2_3', json_data)
def device_license_summary(self,
                           limit,
                           order,
                           page_number,
                           device_type=None,
                           device_uuid=None,
                           dna_level=None,
                           registration_status=None,
                           smart_account_id=None,
                           sort_by=None,
                           virtual_account_name=None,
                           headers=None,
                           **request_parameters):
    """Show license summary of device(s). .

    Args:
        page_number(int): page_number query parameter. Page number of response .
        order(basestring): order query parameter. Sorting order .
        sort_by(basestring): sort_by query parameter. Sort result by field .
        dna_level(basestring): dna_level query parameter. Device Cisco DNA license level .
        device_type(basestring): device_type query parameter. Type of device .
        limit(int): limit query parameter.
        registration_status(basestring): registration_status query parameter. Smart license registration status
            of device .
        virtual_account_name(basestring): virtual_account_name query parameter. Name of virtual account .
        smart_account_id(int): smart_account_id query parameter. Id of smart account .
        device_uuid(basestring): device_uuid query parameter. Id of device .
        headers(dict): Dictionary of HTTP Headers to send with the Request
            .
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        MyDict: JSON response. Access the object's properties by using
        the dot notation or the bracket notation.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the DNA Center cloud returns an error.
    """
    # Type validation; page_number, order and limit are mandatory.
    check_type(headers, dict)
    check_type(page_number, int, may_be_none=False)
    check_type(order, basestring, may_be_none=False)
    check_type(sort_by, basestring)
    check_type(dna_level, basestring)
    check_type(device_type, basestring)
    check_type(limit, int, may_be_none=False)
    check_type(registration_status, basestring)
    check_type(virtual_account_name, basestring)
    check_type(smart_account_id, int)
    check_type(device_uuid, basestring)
    if headers is not None and 'X-Auth-Token' in headers:
        check_type(headers.get('X-Auth-Token'),
                   basestring, may_be_none=False)

    # Assemble query parameters, dropping any whose value is None.
    query = {
        'page_number': page_number,
        'order': order,
        'sort_by': sort_by,
        'dna_level': dna_level,
        'device_type': device_type,
        'limit': limit,
        'registration_status': registration_status,
        'virtual_account_name': virtual_account_name,
        'smart_account_id': smart_account_id,
        'device_uuid': device_uuid,
    }
    query.update(request_parameters)
    query = dict_from_items_with_values(query)

    # Merge caller-supplied headers over the session defaults.
    merged_headers = self._session.headers or {}
    use_custom_headers = False
    if headers:
        merged_headers.update(dict_of_str(headers))
        use_custom_headers = True

    # This endpoint has no path parameters.
    endpoint_full_url = apply_path_params(
        '/dna/intent/api/v1/licenses/device/summary', {})

    if use_custom_headers:
        json_data = self._session.get(endpoint_full_url, params=query,
                                      headers=merged_headers)
    else:
        json_data = self._session.get(endpoint_full_url, params=query)

    # Wrap the raw JSON in the generated model object.
    return self._object_factory('bpm_f4ba64eef4085d518a612835e128fe3c_v2_2_2_3', json_data)
def device_license_details(self,
                           device_uuid,
                           headers=None,
                           **request_parameters):
    """Get detailed license information of a device. .

    Args:
        device_uuid(basestring): device_uuid path parameter. Id of device .
        headers(dict): Dictionary of HTTP Headers to send with the Request
            .
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        MyDict: JSON response. Access the object's properties by using
        the dot notation or the bracket notation.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the DNA Center cloud returns an error.
    """
    # device_uuid is a mandatory path parameter.
    check_type(headers, dict)
    check_type(device_uuid, basestring, may_be_none=False)
    if headers is not None and 'X-Auth-Token' in headers:
        check_type(headers.get('X-Auth-Token'),
                   basestring, may_be_none=False)

    # No fixed query parameters; forward any future extras, dropping Nones.
    query = dict_from_items_with_values(dict(request_parameters))

    # Merge caller-supplied headers over the session defaults.
    merged_headers = self._session.headers or {}
    use_custom_headers = False
    if headers:
        merged_headers.update(dict_of_str(headers))
        use_custom_headers = True

    # Substitute the device id into the URL template.
    endpoint_full_url = apply_path_params(
        '/dna/intent/api/v1/licenses/device/{device_uuid}/details',
        {'device_uuid': device_uuid})

    if use_custom_headers:
        json_data = self._session.get(endpoint_full_url, params=query,
                                      headers=merged_headers)
    else:
        json_data = self._session.get(endpoint_full_url, params=query)

    # Wrap the raw JSON in the generated model object.
    return self._object_factory('bpm_f04f865c01d5c17a5f0cb5abe620dd8_v2_2_2_3', json_data)
def device_deregistration(self,
                          device_uuids=None,
                          headers=None,
                          payload=None,
                          active_validation=True,
                          **request_parameters):
    """Deregister device(s) from CSSM (Cisco Smart Software Manager).

    Args:
        device_uuids(list): Licenses's Comma separated device ids (list of strings).
        headers(dict): Dictionary of HTTP Headers to send with the Request.
        payload(dict): A JSON serializable Python object to send in the
            body of the Request.
        active_validation(bool): Enable/Disable payload validation.
            Defaults to True.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        MyDict: JSON response. Access the object's properties by using
        the dot notation or the bracket notation.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the DNA Center cloud returns an error.
    """
    check_type(headers, dict)
    check_type(payload, dict)
    if headers is not None:
        if 'Content-Type' in headers:
            check_type(headers.get('Content-Type'),
                       basestring, may_be_none=False)
        if 'X-Auth-Token' in headers:
            check_type(headers.get('X-Auth-Token'),
                       basestring, may_be_none=False)

    _params = {
    }
    _params.update(request_parameters)
    _params = dict_from_items_with_values(_params)

    path_params = {
    }

    _payload = {
        'device_uuids':
            device_uuids,
    }
    _payload.update(payload or {})
    _payload = dict_from_items_with_values(_payload)
    if active_validation:
        self._request_validator('jsd_b2f15d0c54c2862a60a904289ddd_v2_2_2_3')\
            .validate(_payload)

    with_custom_headers = False
    # Copy the session headers: updating the session's own dict in place
    # would make per-call custom headers persist across later requests.
    _headers = dict(self._session.headers or {})
    if headers:
        _headers.update(dict_of_str(headers))
        with_custom_headers = True

    e_url = ('/dna/intent/api/v1/licenses/smartAccount/virtualAccount/'
             + 'deregister')
    endpoint_full_url = apply_path_params(e_url, path_params)
    if with_custom_headers:
        json_data = self._session.put(endpoint_full_url, params=_params,
                                      json=_payload,
                                      headers=_headers)
    else:
        json_data = self._session.put(endpoint_full_url, params=_params,
                                      json=_payload)

    return self._object_factory('bpm_b2f15d0c54c2862a60a904289ddd_v2_2_2_3', json_data)
def device_registration(self,
                        virtual_account_name,
                        device_uuids=None,
                        headers=None,
                        payload=None,
                        active_validation=True,
                        **request_parameters):
    """Register device(s) in CSSM (Cisco Smart Software Manager).

    Args:
        device_uuids(list): Licenses's Comma separated device ids (list of strings).
        virtual_account_name(basestring): virtual_account_name path parameter. Name of virtual account.
        headers(dict): Dictionary of HTTP Headers to send with the Request.
        payload(dict): A JSON serializable Python object to send in the
            body of the Request.
        active_validation(bool): Enable/Disable payload validation.
            Defaults to True.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        MyDict: JSON response. Access the object's properties by using
        the dot notation or the bracket notation.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the DNA Center cloud returns an error.
    """
    check_type(headers, dict)
    check_type(payload, dict)
    check_type(virtual_account_name, basestring,
               may_be_none=False)
    if headers is not None:
        if 'Content-Type' in headers:
            check_type(headers.get('Content-Type'),
                       basestring, may_be_none=False)
        if 'X-Auth-Token' in headers:
            check_type(headers.get('X-Auth-Token'),
                       basestring, may_be_none=False)

    _params = {
    }
    _params.update(request_parameters)
    _params = dict_from_items_with_values(_params)

    path_params = {
        'virtual_account_name': virtual_account_name,
    }

    _payload = {
        'device_uuids':
            device_uuids,
    }
    _payload.update(payload or {})
    _payload = dict_from_items_with_values(_payload)
    if active_validation:
        self._request_validator('jsd_df26f516755a50b5b5477324cf5cb649_v2_2_2_3')\
            .validate(_payload)

    with_custom_headers = False
    # Copy the session headers: updating the session's own dict in place
    # would make per-call custom headers persist across later requests.
    _headers = dict(self._session.headers or {})
    if headers:
        _headers.update(dict_of_str(headers))
        with_custom_headers = True

    e_url = ('/dna/intent/api/v1/licenses/smartAccount/virtualAccount/'
             + '{virtual_account_name}/register')
    endpoint_full_url = apply_path_params(e_url, path_params)
    if with_custom_headers:
        json_data = self._session.put(endpoint_full_url, params=_params,
                                      json=_payload,
                                      headers=_headers)
    else:
        json_data = self._session.put(endpoint_full_url, params=_params,
                                      json=_payload)

    return self._object_factory('bpm_df26f516755a50b5b5477324cf5cb649_v2_2_2_3', json_data)
def change_virtual_account(self,
                           smart_account_id,
                           virtual_account_name,
                           device_uuids=None,
                           headers=None,
                           payload=None,
                           active_validation=True,
                           **request_parameters):
    """Transfer device(s) from one virtual account to another within
    the same smart account.

    Args:
        device_uuids(list): Licenses's Comma separated device ids (list of strings).
        smart_account_id(basestring): smart_account_id path parameter. Id of smart account.
        virtual_account_name(basestring): virtual_account_name path parameter. Name of target
            virtual account.
        headers(dict): Dictionary of HTTP Headers to send with the Request.
        payload(dict): A JSON serializable Python object to send in the
            body of the Request.
        active_validation(bool): Enable/Disable payload validation.
            Defaults to True.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        MyDict: JSON response. Access the object's properties by using
        the dot notation or the bracket notation.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the DNA Center cloud returns an error.
    """
    check_type(headers, dict)
    check_type(payload, dict)
    check_type(smart_account_id, basestring,
               may_be_none=False)
    check_type(virtual_account_name, basestring,
               may_be_none=False)
    if headers is not None:
        if 'Content-Type' in headers:
            check_type(headers.get('Content-Type'),
                       basestring, may_be_none=False)
        if 'X-Auth-Token' in headers:
            check_type(headers.get('X-Auth-Token'),
                       basestring, may_be_none=False)

    _params = {
    }
    _params.update(request_parameters)
    _params = dict_from_items_with_values(_params)

    path_params = {
        'smart_account_id': smart_account_id,
        'virtual_account_name': virtual_account_name,
    }

    _payload = {
        'device_uuids':
            device_uuids,
    }
    _payload.update(payload or {})
    _payload = dict_from_items_with_values(_payload)
    if active_validation:
        self._request_validator('jsd_bd5b507f58a50aab614e3d7409eec4c_v2_2_2_3')\
            .validate(_payload)

    with_custom_headers = False
    # Copy the session headers: updating the session's own dict in place
    # would make per-call custom headers persist across later requests.
    _headers = dict(self._session.headers or {})
    if headers:
        _headers.update(dict_of_str(headers))
        with_custom_headers = True

    e_url = ('/dna/intent/api/v1/licenses/smartAccount/{smart_account_'
             + 'id}/virtualAccount/{virtual_account_name}/device/transfe'
             + 'r')
    endpoint_full_url = apply_path_params(e_url, path_params)
    if with_custom_headers:
        json_data = self._session.post(endpoint_full_url, params=_params,
                                       json=_payload,
                                       headers=_headers)
    else:
        json_data = self._session.post(endpoint_full_url, params=_params,
                                       json=_payload)

    return self._object_factory('bpm_bd5b507f58a50aab614e3d7409eec4c_v2_2_2_3', json_data)
def virtual_account_details(self,
                            smart_account_id,
                            headers=None,
                            **request_parameters):
    """Get virtual account details of a smart account.

    Args:
        smart_account_id(basestring): smart_account_id path parameter. Id of smart account.
        headers(dict): Dictionary of HTTP Headers to send with the Request.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        MyDict: JSON response. Access the object's properties by using
        the dot notation or the bracket notation.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the DNA Center cloud returns an error.
    """
    check_type(headers, dict)
    check_type(smart_account_id, basestring,
               may_be_none=False)
    if headers is not None:
        if 'X-Auth-Token' in headers:
            check_type(headers.get('X-Auth-Token'),
                       basestring, may_be_none=False)

    _params = {
    }
    _params.update(request_parameters)
    _params = dict_from_items_with_values(_params)

    path_params = {
        'smart_account_id': smart_account_id,
    }

    with_custom_headers = False
    # Copy the session headers: updating the session's own dict in place
    # would make per-call custom headers persist across later requests.
    _headers = dict(self._session.headers or {})
    if headers:
        _headers.update(dict_of_str(headers))
        with_custom_headers = True

    e_url = ('/dna/intent/api/v1/licenses/smartAccount/{smart_account_'
             + 'id}/virtualAccounts')
    endpoint_full_url = apply_path_params(e_url, path_params)
    if with_custom_headers:
        json_data = self._session.get(endpoint_full_url, params=_params,
                                      headers=_headers)
    else:
        json_data = self._session.get(endpoint_full_url, params=_params)

    return self._object_factory('bpm_ab450b197375fa9bcd95219113a3075_v2_2_2_3', json_data)
def smart_account_details(self,
                          headers=None,
                          **request_parameters):
    """Get detail of all smart accounts.

    Args:
        headers(dict): Dictionary of HTTP Headers to send with the Request.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        MyDict: JSON response. Access the object's properties by using
        the dot notation or the bracket notation.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the DNA Center cloud returns an error.
    """
    check_type(headers, dict)
    if headers is not None:
        if 'X-Auth-Token' in headers:
            check_type(headers.get('X-Auth-Token'),
                       basestring, may_be_none=False)

    _params = {
    }
    _params.update(request_parameters)
    _params = dict_from_items_with_values(_params)

    path_params = {
    }

    with_custom_headers = False
    # Copy the session headers: updating the session's own dict in place
    # would make per-call custom headers persist across later requests.
    _headers = dict(self._session.headers or {})
    if headers:
        _headers.update(dict_of_str(headers))
        with_custom_headers = True

    e_url = ('/dna/intent/api/v1/licenses/smartAccounts')
    endpoint_full_url = apply_path_params(e_url, path_params)
    if with_custom_headers:
        json_data = self._session.get(endpoint_full_url, params=_params,
                                      headers=_headers)
    else:
        json_data = self._session.get(endpoint_full_url, params=_params)

    return self._object_factory('bpm_ea3fdbde23325051a76b9d062c2962a0_v2_2_2_3', json_data)
def license_term_details(self,
                         device_type,
                         smart_account_id,
                         virtual_account_name,
                         headers=None,
                         **request_parameters):
    """Get license term details.

    Args:
        smart_account_id(basestring): smart_account_id path parameter. Id of smart account.
        virtual_account_name(basestring): virtual_account_name path parameter. Name of virtual
            account. Putting "All" will give license term detail for all virtual accounts.
        device_type(basestring): device_type query parameter. Type of device like router,
            switch, wireless or ise.
        headers(dict): Dictionary of HTTP Headers to send with the Request.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        MyDict: JSON response. Access the object's properties by using
        the dot notation or the bracket notation.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the DNA Center cloud returns an error.
    """
    check_type(headers, dict)
    check_type(device_type, basestring,
               may_be_none=False)
    check_type(smart_account_id, basestring,
               may_be_none=False)
    check_type(virtual_account_name, basestring,
               may_be_none=False)
    if headers is not None:
        if 'X-Auth-Token' in headers:
            check_type(headers.get('X-Auth-Token'),
                       basestring, may_be_none=False)

    _params = {
        'device_type':
            device_type,
    }
    _params.update(request_parameters)
    _params = dict_from_items_with_values(_params)

    path_params = {
        'smart_account_id': smart_account_id,
        'virtual_account_name': virtual_account_name,
    }

    with_custom_headers = False
    # Copy the session headers: updating the session's own dict in place
    # would make per-call custom headers persist across later requests.
    _headers = dict(self._session.headers or {})
    if headers:
        _headers.update(dict_of_str(headers))
        with_custom_headers = True

    e_url = ('/dna/intent/api/v1/licenses/term/smartAccount/{smart_acc'
             + 'ount_id}/virtualAccount/{virtual_account_name}')
    endpoint_full_url = apply_path_params(e_url, path_params)
    if with_custom_headers:
        json_data = self._session.get(endpoint_full_url, params=_params,
                                      headers=_headers)
    else:
        json_data = self._session.get(endpoint_full_url, params=_params)

    return self._object_factory('bpm_df2d278e89b45c8ea0ca0a945c001f08_v2_2_2_3', json_data)
def license_usage_details(self,
                          device_type,
                          smart_account_id,
                          virtual_account_name,
                          headers=None,
                          **request_parameters):
    """Get count of purchased and in use DNA and Network licenses.

    Args:
        smart_account_id(basestring): smart_account_id path parameter. Id of smart account.
        virtual_account_name(basestring): virtual_account_name path parameter. Name of virtual
            account. Putting "All" will give license usage detail for all virtual accounts.
        device_type(basestring): device_type query parameter. Type of device like router,
            switch, wireless or ise.
        headers(dict): Dictionary of HTTP Headers to send with the Request.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        MyDict: JSON response. Access the object's properties by using
        the dot notation or the bracket notation.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the DNA Center cloud returns an error.
    """
    check_type(headers, dict)
    check_type(device_type, basestring,
               may_be_none=False)
    check_type(smart_account_id, basestring,
               may_be_none=False)
    check_type(virtual_account_name, basestring,
               may_be_none=False)
    if headers is not None:
        if 'X-Auth-Token' in headers:
            check_type(headers.get('X-Auth-Token'),
                       basestring, may_be_none=False)

    _params = {
        'device_type':
            device_type,
    }
    _params.update(request_parameters)
    _params = dict_from_items_with_values(_params)

    path_params = {
        'smart_account_id': smart_account_id,
        'virtual_account_name': virtual_account_name,
    }

    with_custom_headers = False
    # Copy the session headers: updating the session's own dict in place
    # would make per-call custom headers persist across later requests.
    _headers = dict(self._session.headers or {})
    if headers:
        _headers.update(dict_of_str(headers))
        with_custom_headers = True

    e_url = ('/dna/intent/api/v1/licenses/usage/smartAccount/{smart_ac'
             + 'count_id}/virtualAccount/{virtual_account_name}')
    endpoint_full_url = apply_path_params(e_url, path_params)
    if with_custom_headers:
        json_data = self._session.get(endpoint_full_url, params=_params,
                                      headers=_headers)
    else:
        json_data = self._session.get(endpoint_full_url, params=_params)

    return self._object_factory('bpm_e55ecbbda454c6a01d905e6f4cce16_v2_2_2_3', json_data)
| 40.68268
| 115
| 0.590771
| 3,450
| 32,180
| 5.233333
| 0.087536
| 0.026918
| 0.039878
| 0.020936
| 0.837884
| 0.812074
| 0.810579
| 0.79856
| 0.786984
| 0.774522
| 0
| 0.014962
| 0.345774
| 32,180
| 790
| 116
| 40.734177
| 0.842635
| 0.329149
| 0
| 0.787879
| 0
| 0
| 0.100305
| 0.06314
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02381
| false
| 0
| 0.012987
| 0
| 0.060606
| 0.002165
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
258ddfe521628609f864161878d3d9bec54ef05a
| 51,455
|
py
|
Python
|
models.py
|
jonasvj/TFDE
|
c5d25947b28524c7a40626f797ca8c157fa70a53
|
[
"MIT"
] | null | null | null |
models.py
|
jonasvj/TFDE
|
c5d25947b28524c7a40626f797ca8c157fa70a53
|
[
"MIT"
] | null | null | null |
models.py
|
jonasvj/TFDE
|
c5d25947b28524c7a40626f797ca8c157fa70a53
|
[
"MIT"
] | null | null | null |
# Imports
import pyro
import torch
import numpy as np
from torch import nn
import pyro.distributions as dist
from scipy.integrate import nquad
from sklearn.cluster import KMeans
from pyro.ops.indexing import Vindex
from pyro.nn import PyroModule, PyroParam
from torch.distributions import transform_to
import pyro.distributions.constraints as constraints
from pyro.infer import SVI, TraceEnum_ELBO, config_enumerate
from torch.utils.data import RandomSampler, BatchSampler
from sklearn.covariance import EmpiricalCovariance
class GaussianMixtureModel(PyroModule):
    """Gaussian Mixture Model following the CP formulation.

    A mixture of K Gaussians with diagonal covariance over M-dimensional
    data, with the discrete component assignment enumerated out during
    inference (see the ``@config_enumerate`` on ``forward``).
    """

    def __init__(self, K, M, device='cpu'):
        """Build the model.

        Args:
            K (int): Number of mixture components.
            M (int): Number of data dimensions.
            device (str): Torch device string, e.g. 'cpu' or 'cuda:0'.
        """
        super().__init__()
        self.K = K
        self.M = M
        self.device = device
        # Constructor kwargs, kept so an identical model can be re-built.
        self.kwargs = {'K': K, 'M': M, 'device': device}
        self.train_losses = list()
        self.val_losses = list()
        # Mixture weights, constrained to the probability simplex;
        # initialized uniform.
        self.weights = PyroParam(
            (torch.ones(K, device=self.device) / K),
            constraint=constraints.simplex)
        self.random_init()
        if 'cuda' in self.device:
            self.cuda(self.device)
        self.eval()

    def random_init(self):
        """Randomly initialize component locations and scales."""
        self.locs = PyroParam(
            torch.randn(self.K, self.M, device=self.device),
            constraint=constraints.real)
        self.scales = PyroParam(
            torch.rand(self.K, self.M, device=self.device),
            constraint=constraints.positive)

    @config_enumerate
    def forward(self, data=None, n_samples=1000):
        """Pyro model: score `data` if given, otherwise draw samples.

        Args:
            data: Optional (N, M) tensor of observations.
            n_samples (int): Number of points to sample when data is None.

        Returns:
            (N, M) tensor of observed/sampled values.
        """
        N, M = data.shape if data is not None else (n_samples, self.M)
        # Empty tensor for data samples
        x_sample = torch.empty(N, M, device=self.device)
        with pyro.plate('data', size=N):
            # Sample cluster/component
            k = pyro.sample('k', dist.Categorical(self.weights))
            for m in range(self.M):
                # Observations of x_m
                obs = data[:, m] if data is not None else None
                # Sample x_m; the 1e-9 keeps the scale strictly positive.
                x_sample[:, m] = pyro.sample(
                    f'x_{m}',
                    dist.Normal(loc=self.locs[k, m],
                                scale=self.scales[k, m]+1e-9),
                    obs=obs)
        return x_sample

    def guide(self, data):
        """Empty guide: the only latent ('k') is enumerated by the ELBO."""
        pass

    def fit_model(self, data, data_val=None, lr=3e-4, mb_size=512,
                  n_epochs=500, verbose=True, early_stopping=True):
        """Fit parameters with SVI / TraceEnum_ELBO on mini-batches.

        Args:
            data: Training data, indexable by a list of row indices.
            data_val: Optional validation data (enables early stopping).
            lr (float): Adam learning rate.
            mb_size (int): Mini-batch size.
            n_epochs (int): Maximum number of epochs.
            verbose (bool): Print loss every 10 epochs.
            early_stopping (bool): Stop when validation NLL stops
                improving (ignored when data_val is None).
        """
        N_train = len(data)
        if data_val is not None:
            N_val = len(data_val)
        else:
            early_stopping = False
        # Variables for early-stopping
        best_loss = np.inf
        count = 0
        adam = pyro.optim.Adam({"lr": lr})
        svi = SVI(self, self.guide, adam, loss=TraceEnum_ELBO())
        for epoch in range(n_epochs):
            self.train()
            mbs = BatchSampler(
                RandomSampler(range(N_train)),
                batch_size=mb_size,
                drop_last=False)
            loss = 0
            for mb_idx in mbs:
                loss += svi.step(data[mb_idx])
            # Track mean per-point training loss.
            self.train_losses.append(loss/N_train)
            if data_val is not None:
                self.eval()
                val_loss = self.nllh(data_val) / N_val
                self.val_losses.append(val_loss)
            if epoch % 10 == 0 and verbose:
                print('[epoch {}] loss: {:.4f}'.format(epoch, loss/N_train))
            if early_stopping:
                # Reset counter if val loss has improved by 0.1%
                if val_loss < best_loss*(1 - 1e-3):
                    best_loss = val_loss
                    count = 0
                else:
                    count += 1
                # Break training loop if val loss has not improved in 10 epochs
                if count == 10:
                    break

    def init_from_data(self, data, k_means=True):
        """Initialize locs/scales from the data (optionally via K-means).

        Args:
            data: (N, M) tensor of training points.
            k_means (bool): If True, replace the random locs with
                K-means cluster centers.
        """
        N, M = data.shape
        # Locs: K rows drawn uniformly at random from the data.
        self.locs = PyroParam(
            data[torch.multinomial(torch.ones(N) / N, self.K),],
            constraint=constraints.real)
        # Scales: per-dimension empirical std, repeated for every component.
        self.scales = PyroParam(
            data.std(dim=0).repeat(self.K).reshape(self.K, self.M),
            constraint=constraints.positive)
        if k_means:
            k_means_model = KMeans(self.K)
            k_means_model.fit(data.cpu().numpy())
            locs = k_means_model.cluster_centers_
            self.locs = PyroParam(
                torch.tensor(locs, device=self.device),
                constraint=constraints.real)

    def log_density(self, data):
        """Return the log density log p(x) for each row of `data` (shape N)."""
        with torch.no_grad():
            data = data.unsqueeze(1)  # N x 1 x M
            log_weights = torch.log(self.weights)  # K
            # Per-component log prob, summed over dimensions -> N x K.
            log_probs = dist.Normal(
                loc=self.locs,
                scale=self.scales).log_prob(data).sum(dim=-1)  # N x K
            return torch.logsumexp(log_weights + log_probs, dim=-1)  # N

    def nllh(self, data):
        """Return the total negative log-likelihood of `data` as a float."""
        with torch.no_grad():
            log_density = self.log_density(data)
            return -torch.sum(log_density).item()

    def density(self, data):
        """Return the (non-log) density p(x) for each row of `data`.

        Raises:
            ValueError: If `data` does not have M columns.
        """
        with torch.no_grad():
            N, M = data.shape
            if M != self.M:
                raise ValueError('Incorrect number of data columns.')
            log_probs = dist.Normal(
                loc=self.locs,
                scale=self.scales).log_prob(data.unsqueeze(1))
            # Sum over dims inside exp, then weight-average over components.
            density = (
                torch.exp(log_probs.sum(dim=2)) * self.weights).sum(dim=1)
            return density

    def log_likelihood(self, data):
        """Return the total log-likelihood of `data` (tensor scalar)."""
        with torch.no_grad():
            llh = torch.log(self.density(data)).sum()
            return llh

    def eval_density_grid(self, n_points=100, grid=[-5, 5, -5, 5]):
        """Evaluate the 2D density on a regular grid.

        Args:
            n_points (int): Grid resolution per axis.
            grid (list): [x_min, x_max, y_min, y_max] limits.

        Returns:
            ((x_range, y_range), densities) with densities shaped
            (n_points, n_points).
        """
        x_range = np.linspace(grid[0], grid[1], n_points)
        y_range = np.linspace(grid[2], grid[3], n_points)
        X1, X2 = np.meshgrid(x_range, y_range)
        XX = np.column_stack((X1.ravel(), X2.ravel()))
        densities = self.density(
            torch.tensor(XX, device=self.device)).cpu().numpy()
        return (x_range, y_range), densities.reshape((n_points, n_points))
class CPModel(PyroModule):
    """CP model where the distribution on each variable can be specified.

    A K-component mixture in which each data dimension x_m gets its own
    (user-supplied) distribution family; each component holds one set of
    that family's parameters.
    """

    def __init__(self, K, distributions, device='cpu'):
        """Build the model.

        Args:
            K (int): Number of mixture components.
            distributions: Sequence of pyro distribution classes, one per
                data dimension (its length defines M).
            device (str): Torch device string.
        """
        super().__init__()
        self.K = K
        self.distributions = distributions
        self.device = device
        # Constructor kwargs, kept so an identical model can be re-built.
        self.kwargs = {'K': K, 'distributions': distributions, 'device': device}
        self.M = len(distributions)
        self.train_losses = list()
        # Uniformly-initialized mixture weights on the simplex.
        self.weights = PyroParam(
            torch.ones(self.K, device=self.device) / self.K,
            constraint=constraints.simplex)
        self.init_params()
        if 'cuda' in self.device:
            self.cuda(self.device)
        self.eval()

    def init_params(self):
        """(Re-)initialize one parameterized component module per dimension."""
        self.components = nn.ModuleList()
        for m, d in enumerate(self.distributions):
            # Create pyro module for attribute m (i.e. x_m)
            component = PyroModule(name=str(m))
            # Distribution of x_m
            component.dist = d
            # List for parameter names of distribution
            component.params = list()
            # Parameters and constraints of distribution
            for param, constr in d.arg_constraints.items():
                # Initialize parameter and set as an attribute of the component
                # Example:
                # component.loc = PyroParam(init_tensor, constraint=constraints.real)
                # transform_to maps an unconstrained uniform(-1, 1) draw
                # into the parameter's constrained support.
                init_tensor = transform_to(constr)(
                    torch.empty(self.K, device=self.device).uniform_(-1, 1))
                setattr(component, param, PyroParam(init_tensor, constraint=constr))
                # Add parameter name to list
                component.params.append(param)
            # Add component to module list
            self.components.append(component)

    @config_enumerate
    def forward(self, data=None, n_samples=1000):
        """Pyro model: score `data` if given, otherwise draw samples.

        Raises:
            ValueError: If `data` does not have M columns.
        """
        N, M = data.shape if data is not None else (n_samples, self.M)
        if M != self.M:
            raise ValueError('Incorrect number of data columns.')
        # Empty tensor for samples
        x_sample = torch.empty(N, M, device=self.device)
        with pyro.plate('data', N):
            # Draw cluster/component
            k = pyro.sample('k', dist.Categorical(self.weights))
            for m in range(M):
                # Observations of x_m
                obs = data[:, m] if data is not None else None
                # Parameters for distribution of x_m, selected for component k
                params = {param: getattr(self.components[m], param)[k]
                          for param in self.components[m].params}
                # Draw samples of x_m
                x_sample[:, m] = pyro.sample(
                    f'x_{m}',
                    self.components[m].dist(**params),
                    obs=obs)
        return x_sample

    def guide(self, data):
        """Empty guide: the only latent ('k') is enumerated by the ELBO."""
        pass

    def fit_model(self, data, lr=3e-4, n_steps=10000, verbose=True):
        """Fit parameters with full-batch SVI / TraceEnum_ELBO.

        Args:
            data: Training data tensor.
            lr (float): Adam learning rate.
            n_steps (int): Number of SVI steps.
            verbose (bool): Print loss every 1000 steps.
        """
        self.train()
        adam = pyro.optim.Adam({"lr": lr})
        svi = SVI(self, self.guide, adam, loss=TraceEnum_ELBO())
        for step in range(n_steps):
            loss = svi.step(data)
            self.train_losses.append(loss)
            if step % 1000 == 0 and verbose:
                print('[iter {}] loss: {:.4f}'.format(step, loss))
        self.eval()

    def hot_start(self, data, n_starts=100):
        """Try `n_starts` random seeds and keep the best initialization.

        Each candidate is scored by its initial loss (one lr=0 SVI step,
        which does not change the parameters); the model is finally
        re-initialized with the best-scoring seed.
        """
        seeds = torch.multinomial(
            torch.ones(10000) / 10000, num_samples=n_starts)
        inits = list()
        for seed in seeds:
            pyro.set_rng_seed(seed)
            pyro.clear_param_store()
            # Set new initial parameters
            self.init_params()
            # Get initial loss
            self.fit_model(data, lr=0, n_steps=1, verbose=False)
            loss = self.train_losses[-1]
            # Save loss and seed
            inits.append((loss, seed))
            # Reset train losses
            self.train_losses = list()
        # Best initialization
        _, best_seed = min(inits)
        # Initialize with best seed
        pyro.set_rng_seed(best_seed)
        pyro.clear_param_store()
        self.init_params()

    def density(self, data):
        """Return the density p(x) for each row of `data`.

        Raises:
            ValueError: If `data` does not have M columns.
        """
        with torch.no_grad():
            N, M = data.shape
            if M != self.M:
                raise ValueError('Incorrect number of data columns.')
            log_probs = torch.empty(N, self.K, M, device=self.device)
            for m in range(M):
                params = {param: getattr(self.components[m], param)
                          for param in self.components[m].params}
                log_probs[:, :, m] = self.components[m].dist(
                    **params).log_prob(data[:, m].unsqueeze(1))
            # Sum over dims inside exp, then weight-average over components.
            density = (
                torch.exp(log_probs.sum(dim=2)) * self.weights).sum(dim=1)
            return density

    def log_likelihood(self, data):
        """Return the total log-likelihood of `data` (tensor scalar)."""
        with torch.no_grad():
            llh = torch.log(self.density(data)).sum()
            return llh

    def eval_density_grid(self, n_points=100):
        """Evaluate the 2D density on an [-5, 5] x [-5, 5] grid."""
        x_range = np.linspace(-5, 5, n_points)
        X1, X2 = np.meshgrid(x_range, x_range)
        XX = np.column_stack((X1.ravel(), X2.ravel()))
        densities = self.density(
            torch.tensor(XX, device=self.device)).cpu().numpy()
        return x_range, densities.reshape((n_points, n_points))

    def unit_test(self, int_limits, opts=dict()):
        """Integrates probablity density (should be ~1 over the support)."""
        return nquad(
            lambda *args: self.density(
                torch.tensor([args], device=self.device)).item(),
            int_limits, opts=opts)
class TensorTrain(PyroModule):
"""Tensor Train model"""
def __init__(self, Ks, device='cpu'):
super().__init__()
self.Ks = Ks
self.device = device
self.kwargs = {'Ks': Ks, 'device': device}
self.M = len(self.Ks) - 1
self.train_losses = list()
self.val_losses = list()
# Weights for latent variable k_0
self.k0_weights = PyroParam(
torch.ones(self.Ks[0], device=self.device) / self.Ks[0],
constraint=dist.constraints.simplex)
# Parameters indexed by latent variable number
# I.e. by 1, 2,..., M
self.params = nn.ModuleDict()
# Intialize weights, locs and scales
self.init_params()
if 'cuda' in self.device:
self.cuda(self.device)
if len(self.Ks) > 60:
self.forward = self.forward_alt
def init_params(self, loc_min=None, loc_max=None, scale_max=None):
if loc_min is None:
loc_min = [-1 for m in range(self.M)]
if loc_max is None:
loc_max = [1 for m in range(self.M)]
if scale_max is None:
scale_max = [1 for m in range(self.M)]
for m in range(1, self.M + 1):
# PyroModule for storing weights, locs
# and scales for x_m
module = PyroModule(name=f'x_{m}')
param_shape = (self.Ks[m-1], self.Ks[m])
# Weight matrix W^m ("Transition probabilities")
# I.e. probability of k_m = a given k_{m-1} = b
module.weights = PyroParam(
torch.ones(param_shape, device=self.device) / self.Ks[m],
constraint=constraints.simplex)
# Locs for x_m
module.locs = PyroParam(
dist.Uniform(loc_min[m-1]-1e-8, loc_max[m-1]+1e-8).sample(
param_shape).to(self.device),
constraint=constraints.real)
# Scales for x_m
module.scales = PyroParam(
dist.Uniform(1e-8, scale_max[m-1]+1e-7).sample(
param_shape).to(self.device),
constraint=constraints.positive)
self.params[str(m)] = module
@config_enumerate
def forward(self, data=None, n_samples=1000):
N, M = data.shape if data is not None else (n_samples, self.M)
# Empty tensor for data samples
x_sample = torch.empty(N, M, device=self.device)
with pyro.plate('data', size=N):
# Sample k_0
k_m_prev = pyro.sample(
'k_0',
dist.Categorical(self.k0_weights))
for m, params in enumerate(self.params.values()):
# Sample k_m
k_m = pyro.sample(
f'k_{m + 1}',
dist.Categorical(Vindex(params.weights)[k_m_prev]))
# Observations of x_m
obs = data[:, m] if data is not None else None
# Sample x_m
x_sample[:, m] = pyro.sample(
f'x_{m + 1}',
dist.Normal(loc=Vindex(params.locs)[k_m_prev, k_m],
scale=Vindex(params.scales)[k_m_prev, k_m]+1e-9),
obs=obs)
k_m_prev = k_m
return x_sample
@config_enumerate
def forward_alt(self, data=None, n_samples=1000):
N, M = data.shape if data is not None else (n_samples, self.M)
# Empty tensor for data samples
x_sample = torch.empty(N, M, device=self.device)
with pyro.plate('data', size=N):
# Sample k_0
k_m_prev = pyro.sample(
'k_0',
dist.Categorical(self.k0_weights))
for m in pyro.markov(range(self.M)):
params = self.params[str(m +1)]
# Sample k_m
k_m = pyro.sample(
f'k_{m + 1}',
dist.Categorical(Vindex(params.weights)[k_m_prev]))
# Observations of x_m
obs = data[:, m] if data is not None else None
# Sample x_m
x_sample[:, m] = pyro.sample(
f'x_{m + 1}',
dist.Normal(loc=Vindex(params.locs)[k_m_prev, k_m],
scale=Vindex(params.scales)[k_m_prev, k_m]+1e-9),
obs=obs)
k_m_prev = k_m
return x_sample
def guide(self, data):
pass
def fit_model(self, data, data_val=None, lr=3e-4, mb_size=512,
n_epochs=500, verbose=True, early_stopping=True):
N_train = len(data)
if data_val is not None:
N_val = len(data_val)
else:
early_stopping = False
# Variables for early-stopping
best_loss = np.inf
count = 0
adam = pyro.optim.Adam({"lr": lr})
svi = SVI(self, self.guide, adam, loss=TraceEnum_ELBO())
for epoch in range(n_epochs):
self.train()
mbs = BatchSampler(
RandomSampler(range(N_train)),
batch_size=mb_size,
drop_last=False)
loss = 0
for mb_idx in mbs:
loss += svi.step(data[mb_idx])
self.train_losses.append(loss/N_train)
if data_val is not None:
self.eval()
val_loss = self.nllh(data_val) / N_val
self.val_losses.append(val_loss)
if epoch % 10 == 0 and verbose:
print('[epoch {}] loss: {:.4f}'.format(epoch, loss/N_train))
if early_stopping:
# Reset counter if val loss has improved by 0.1%
if val_loss < best_loss*(1 - 1e-3):
best_loss = val_loss
count = 0
else:
count += 1
# Break training loop if val loss has not improved in 10 epochs
if count == 10:
break
def hot_start(self, data, subsample_size=None, n_starts=100):
seeds = torch.multinomial(
torch.ones(10000) / 10000, num_samples=n_starts)
inits = list()
data_min = data.min(dim=0).values
data_max = data.max(dim=0).values
data_std = data.std(dim=0)
if subsample_size is not None:
subsample_idx = torch.randperm(len(data))[:subsample_size]
data = data[subsample_idx]
for seed in seeds:
pyro.set_rng_seed(seed)
pyro.clear_param_store()
# Set new initial parameters
self.init_params(loc_min=data_min,
loc_max=data_max,
scale_max=data_std)
# Get initial loss
self.fit_model(
data, lr=0, mb_size=len(data), n_epochs=1, verbose=False)
loss = self.train_losses[-1]
# Save loss and seed
inits.append((loss, seed))
# Reset train losses
self.train_losses = list()
# Best initialization
_, best_seed = min(inits)
# Initialize with best seed
pyro.set_rng_seed(best_seed)
pyro.clear_param_store()
self.init_params(loc_min=data_min,
loc_max=data_max,
scale_max=data_std)
def log_density(self, data):
with torch.no_grad():
N, M = data.shape
data = data.reshape(N, M, 1, 1) # N x M x 1 x 1
# Initialize log density
log_density = torch.zeros(1,1,1, device=self.device) # 1 x 1 x 1
for m, params in reversed(self.params.items()):
m = int(m) - 1
log_prob = dist.Normal(
params.locs, params.scales).log_prob(data[:, m]) # N x K_{m-1} x K_m
log_weights = torch.log(params.weights).unsqueeze(0) # 1 x K_{m-1} x K_m
log_density = torch.logsumexp(
log_weights + log_prob + log_density, dim=-1).unsqueeze(1) # N x 1 x K_{m-1}
log_density = log_density.squeeze(1) # N x K_0
log_k0_weights = torch.log(self.k0_weights) # K_0
return torch.logsumexp(log_k0_weights + log_density, dim=-1) # N
def nllh(self, data):
with torch.no_grad():
log_density = self.log_density(data)
return -torch.sum(log_density).item()
def density(self, data):
with torch.no_grad():
N, M = data.shape
# Intialize sum to neutral multiplier
sum_ = torch.ones(1, 1, device=self.device)
# Iterate in reverse order
# (to sum last latent variables first)
for m, params in reversed(self.params.items()):
# Modules are indexed by 1,2,...,M
# but data by 0,1,...,M-1
m = int(m) - 1
probs = torch.exp(dist.Normal(
loc=params.locs,
scale=params.scales).log_prob(data[:, m].reshape(-1, 1, 1)))
sum_ = (params.weights * probs * sum_.unsqueeze(1)).sum(-1)
density = (self.k0_weights * sum_).sum(-1)
return density
def log_likelihood(self, data):
with torch.no_grad():
llh = torch.log(self.density(data)).sum()
return llh
def eval_density_grid(self, n_points=100, grid=[-5, 5, -5, 5]):
x_range = np.linspace(grid[0], grid[1], n_points)
y_range = np.linspace(grid[2], grid[3], n_points)
X1, X2 = np.meshgrid(x_range, y_range)
XX = np.column_stack((X1.ravel(), X2.ravel()))
densities = self.density(
torch.tensor(XX, device=self.device)).cpu().numpy()
return (x_range, y_range), densities.reshape((n_points, n_points))
def unit_test(self, int_limits, opts=dict()):
"""Integrates probablity density"""
return nquad(
lambda *args: self.density(
torch.tensor([args], device=self.device)).item(),
int_limits, opts=opts)
def unit_test_alt(self, n_points=100, grid=[-5, 5, -5, 5]):
(x_range, y_range), density = self.eval_density_grid(n_points=n_points,
grid=grid)
density = density.sum()
height = (y_range[-1] - y_range[0])/len(y_range)
width = (x_range[-1] - x_range[0])/len(x_range)
scaled_density = density*width*height
return scaled_density
def eval_density_multi_grid(self, limits, n_points):
    """Evaluate the density on an M-dimensional regular grid.

    Parameters
    ----------
    limits : flat sequence of 2*M numbers, (low, high) per dimension.
    n_points : points per dimension (the grid has n_points**M nodes).

    Returns
    -------
    For M == 2: ((x_range, y_range), densities) with densities reshaped
    to (n_points, n_points); otherwise (grid_tensor, densities).
    """
    # Reshape limits to one (low, high) row per dimension
    limits = torch.Tensor(limits).reshape(self.M, 2)
    # Allocate interval ranges
    grid_tensor = torch.zeros(self.M, n_points)
    # Fill in interval ranges
    for m, vals in enumerate(limits):
        grid_tensor[m, :] = torch.linspace(vals[0], vals[1], n_points)
    # Cartesian product of the per-dimension ranges
    mesh = torch.meshgrid([grid for grid in grid_tensor])
    XX = torch.column_stack([x.ravel() for x in mesh])
    # Evaluate densities.  XX is already a tensor, so use .to() instead of
    # torch.tensor(XX, ...), which copy-constructs and emits a UserWarning.
    densities = self.density(XX.to(self.device)).cpu().numpy()
    # Return ranges for M dimensions and all densities
    if self.M == 2:
        return (grid_tensor[0].numpy(), grid_tensor[1].numpy()), densities.reshape((n_points, n_points))
    else:
        return grid_tensor, densities
def unit_test_multidimensional(self, limits, n_points=1000):
    """Riemann-sum integral of the density over an M-dimensional box."""
    grid_tensor, densities = self.eval_density_multi_grid(limits, n_points)
    cell_count = n_points ** self.M
    if self.M == 2:
        spans = (grid_tensor[0][-1] - grid_tensor[0][0]) * (grid_tensor[1][-1] - grid_tensor[1][0])
        scaling = spans / cell_count
    else:
        spans = np.prod([(vals[-1] - vals[0]).item() for vals in grid_tensor])
        scaling = spans / cell_count
    return densities.sum() * scaling
class GaussianMixtureModelFull(PyroModule):
    """Gaussian Mixture Model with full covariance matrix"""
    def __init__(self, K, device='cpu'):
        # K arrives as a sequence for interface parity with the other models:
        # K[0] is the number of components; len(K)-1 the data dimension.
        super().__init__()
        self.device = device
        self.M = len(K)-1 # To comply with other models initialisations
        self.K = K[0] # To comply with other models initialisations
        self.locs = None
        self.scales = None # Variances
        self.cov = None # Covariances
        self.scale_tril = None # Covariance matrix
        self.initialized = False
        self.weights = PyroParam(
            torch.ones(self.K, device=self.device) / self.K,
            constraint=dist.constraints.simplex)
        self.kwargs = {'K': self.K, 'M': self.M, 'device': device}
        self.train_losses = list()
        self.val_losses = list()
        if 'cuda' in self.device:
            self.cuda(self.device)
        self.eval()
    @config_enumerate
    def forward(self, data=None):
        """Pyro model: enumerate component k, observe data under a
        full-covariance MultivariateNormal.  Parameters are lazily
        initialised on the first call (from data when available)."""
        N, M = data.shape if data is not None else (1000, 2)
        if self.initialized is False and data is not None:
            self.init_from_data(data)
        elif self.initialized is False:
            self.random_init(M)
        if M != self.M:
            raise ValueError('Incorrect number of data columns.')
        # Combine covariances and variances
        for k in range(self.K):
            self.scale_tril[k, :, :] = torch.mm(self.scales[k,:].diag_embed(), self.cov[k, :, :])
        # Empty tensor for data samples
        # NOTE(review): allocated without device=; presumably harmless because
        # it is immediately rebound by pyro.sample below -- confirm on CUDA.
        x_sample = torch.empty(N, M)
        with pyro.plate('data', N):
            # Sample cluster/component
            k = pyro.sample('k', dist.Categorical(self.weights))
            x_sample = pyro.sample('x_sample', dist.MultivariateNormal(loc=self.locs[k], scale_tril=self.scale_tril[k]),
                                   obs=data)
        return x_sample
    def init_from_data(self, data, k_means=False):
        """Initialise locs (random data rows, or k-means centroids when
        k_means=True), scales (per-dimension std) and a random LKJ
        lower-Cholesky correlation factor; build scale_tril from them."""
        N, M = data.shape
        self.M = M
        self.locs = PyroParam(
            data[torch.multinomial(torch.ones(N, device=self.device) / N, self.K),],
            constraint=constraints.real, )
        self.scales = PyroParam(
            data.std(dim=0).repeat(self.K).reshape(self.K, self.M).to(self.device),
            constraint=constraints.positive)
        if k_means:
            k_means_model = KMeans(self.K)
            k_means_model.fit(data.cpu().numpy())
            locs = k_means_model.cluster_centers_
            self.locs = PyroParam(
                torch.tensor(locs, device=self.device),
                constraint=constraints.real)
        # Initialisation of lower cholesky factorisation of covariance matrix
        cov = torch.zeros(self.K, self.M, self.M)
        for k in range(self.K):
            cov[k, :, :] =dist.LKJCholesky(self.M).sample()
        self.cov = PyroParam(cov.to(device=self.device), constraint=constraints.lower_cholesky, event_dim=2)
        # Covariance matrix placeholder
        self.scale_tril = torch.zeros(self.K, self.M, self.M, device=self.device)
        # Combination of to lower lower triangular covariance matrix (scale_tril)
        for k in range(self.K):
            self.scale_tril[k, :, :] = torch.mm(self.scales[k,:].diag_embed(), self.cov[k, :, :])
        self.initialized = True
    def random_init(self, M):
        """Random initialisation used when forward() is called without data."""
        self.M = M
        self.locs = PyroParam(
            torch.rand(self.K, self.M, device=self.device),
            constraint=constraints.real)
        self.scales = PyroParam(
            torch.rand(self.K, self.M, device=self.device),
            constraint=constraints.positive)
        # Initialisation of lower cholesky factor of covariance matrix
        cov = torch.zeros(self.K, self.M, self.M)
        for k in range(self.K):
            cov[k, :, :] =dist.LKJCholesky(self.M).sample()
        self.cov = PyroParam(cov.to(device=self.device), constraint=constraints.lower_cholesky, event_dim=2)
        # Covariance matrix placeholder
        self.scale_tril = torch.zeros(self.K, self.M, self.M, device=self.device)
        # Combination of to lower lower triangular covariance matrix (scale_tril)
        for k in range(self.K):
            self.scale_tril[k, :, :] = torch.mm(self.scales[k,:].diag_embed(), self.cov[k, :, :])
        self.initialized = True
    def guide(self, data):
        # Empty guide: the only latent site, k, is enumerated by TraceEnum_ELBO.
        pass
    def fit_model(self, data, data_val=None, lr=3e-4, mb_size=512,
                  n_epochs=500, verbose=True, early_stopping=True):
        """Minibatch SVI training with optional validation-based early
        stopping (patience 10 epochs, 0.1% relative improvement)."""
        N_train = len(data)
        if data_val is not None:
            N_val = len(data_val)
        else:
            # No validation set: early stopping is impossible
            early_stopping = False
        # Variables for early-stopping
        best_loss = np.inf
        count = 0
        adam = pyro.optim.Adam({"lr": lr})
        svi = SVI(self, self.guide, adam, loss=TraceEnum_ELBO())
        for epoch in range(n_epochs):
            self.train()
            mbs = BatchSampler(
                RandomSampler(range(N_train)),
                batch_size=mb_size,
                drop_last=False)
            loss = 0
            for mb_idx in mbs:
                loss += svi.step(data[mb_idx])
            self.train_losses.append(loss/N_train)
            if data_val is not None:
                self.eval()
                val_loss = self.nllh(data_val) / N_val
                self.val_losses.append(val_loss)
            if epoch % 10 == 0 and verbose:
                print('[epoch {}] loss: {:.4f}'.format(epoch, loss/N_train))
            if early_stopping:
                # Reset counter if val loss has improved by 0.1%
                if val_loss < best_loss*(1 - 1e-3):
                    best_loss = val_loss
                    count = 0
                else:
                    count += 1
                # Break training loop if val loss has not improved in 10 epochs
                if count == 10:
                    break
    def density(self, data):
        """Mixture density per row of *data*, computed in double precision."""
        with torch.no_grad():
            N, M = data.shape
            if M != self.M:
                raise ValueError('Incorrect number of data columns.')
            density = torch.zeros(data.shape[0], device=self.device)
            for k in range(self.K):
                density += self.weights[k]* torch.exp(dist.MultivariateNormal(loc=self.locs[k].double(),
                                                                              scale_tril=self.scale_tril[k].double()).log_prob(data))
            return density
    def log_likelihood(self, data):
        """Sum of log densities over the rows of *data* (tensor)."""
        with torch.no_grad():
            llh = torch.log(self.density(data)).sum()
            return llh
    def nllh(self, data):
        # Negative log-likelihood.  NOTE(review): returns a tensor here,
        # while sibling models return a Python float -- callers should check.
        return -self.log_likelihood(data)
    def eval_density_grid(self, n_points=100, grid=[-5, 5, -5, 5]):
        """Evaluate the density on a 2-D grid; returns ((x_range, y_range), Z)."""
        if self.M != 2:
            raise ValueError('Can only evaluate density grid for 2-dimensional data.')
        x_range = np.linspace(grid[0], grid[1], n_points)
        y_range = np.linspace(grid[2], grid[3], n_points)
        X1, X2 = np.meshgrid(x_range, y_range)
        XX = np.column_stack((X1.ravel(), X2.ravel()))
        densities = self.density(
            torch.tensor(XX, device=self.device)).cpu().numpy()
        return (x_range, y_range), densities.reshape((n_points, n_points))
    def unit_test(self, int_limits, opts=dict()):
        """Integrates probablity density"""
        return nquad(
            lambda *args: self.density(
                torch.tensor([args], device=self.device)).item(),
            int_limits, opts=opts)
    def unit_test_alt(self, n_points=100, grid=[-5, 5, -5, 5]):
        """Riemann-sum integral of the 2-D density (should be close to 1)."""
        (x_range, y_range), density = self.eval_density_grid(n_points=n_points,
                                                             grid=grid)
        density = density.sum()
        height = (y_range[-1] - y_range[0]) / len(y_range)
        width = (x_range[-1] - x_range[0]) / len(x_range)
        scaled_density = density * width * height
        return scaled_density
    def eval_density_multi_grid(self, limits, n_points):
        """Evaluate the density on an M-dimensional grid (n_points per dim)."""
        # Reshape limits
        limits = torch.Tensor(limits).reshape(self.M, 2)
        # Allocate interval ranges
        grid_tensor = torch.zeros(self.M, n_points, device=self.device)
        # Fill in interval ranges
        for m, vals in enumerate(limits):
            grid_tensor[m, :] = torch.linspace(vals[0], vals[1], n_points)
        # Create meshgrid of points
        mesh = torch.meshgrid([grid for grid in grid_tensor])
        XX = torch.column_stack([x.ravel() for x in mesh])
        # Evaluate densities
        densities = self.density(
            torch.tensor(XX, device=self.device)).cpu().detach().numpy()
        # Return ranges for M dimensions and all densities
        if self.M == 2:
            return (grid_tensor[0].cpu().numpy(), grid_tensor[1].cpu().numpy()), densities.reshape((n_points, n_points))
        else:
            return grid_tensor, densities
    def unit_test_multidimensional(self, limits, n_points=1000):
        """Riemann-sum integral of the density over an M-dimensional box."""
        grid_tensor, densities = self.eval_density_multi_grid(limits, n_points)
        total_density = densities.sum()
        if self.M == 2:
            scaling = (grid_tensor[0][-1] - grid_tensor[0][0]) * (grid_tensor[1][-1] - grid_tensor[1][0]) / (
                n_points ** self.M)
        else:
            scaling = np.prod([(vals[-1] - vals[0]).item() for vals in grid_tensor]) / (n_points ** self.M)
        scaled_density = total_density * scaling
        return scaled_density
class GaussianMixtureModelFullAlt(PyroModule):
    """Gaussian Mixture Model with full covariance matrix"""
    def __init__(self, K, M, device='cpu'):
        # Unlike GaussianMixtureModelFull, K and M are given directly and
        # all parameters are randomly initialised up front.
        super().__init__()
        self.device = device
        self.K = K
        self.M = M
        self.kwargs = {'K': self.K, 'M': self.M, 'device': self.device}
        self.train_losses = list()
        self.val_losses = list()
        if 'cuda' in self.device:
            self.cuda(self.device)
        self.random_init()
        self.eval()
    def random_init(self):
        """Random init: uniform weights and locs, LKJ lower-Cholesky factor."""
        self.weights = PyroParam(
            torch.ones(self.K, device=self.device) / self.K,
            constraint=dist.constraints.simplex)
        self.locs = PyroParam(
            torch.rand(self.K, self.M, device=self.device),
            constraint=constraints.real)
        self.scale_tril = PyroParam(
            dist.LKJCholesky(self.M).sample((self.K,)).to(self.device),
            constraint=constraints.lower_cholesky)
    @config_enumerate
    def forward(self, data=None):
        """Pyro model: enumerate component k, observe x_sample under a
        full-covariance MultivariateNormal."""
        N, M = data.shape if data is not None else (1000, self.M)
        with pyro.plate('data', N):
            # Sample cluster/component
            k = pyro.sample('k', dist.Categorical(self.weights))
            x_sample = pyro.sample('x_sample',
                                   dist.MultivariateNormal(
                                       loc=Vindex(self.locs)[k],
                                       scale_tril=Vindex(self.scale_tril)[k]),
                                   obs=data)
        return x_sample
    def init_from_data(self, data):
        """K-means based init: centroids become locs, cluster frequencies
        become weights, and each cluster's empirical-covariance Cholesky
        factor becomes scale_tril (identity fallback when the covariance
        is not positive definite)."""
        data_np = data.cpu().numpy()
        k_means_model = KMeans(self.K)
        k_means_model.fit(data_np)
        locs = k_means_model.cluster_centers_
        labels = k_means_model.predict(data_np)
        self.locs = PyroParam(
            torch.tensor(locs, device=self.device),
            constraint=constraints.real)
        weights = torch.zeros(self.K)
        scale_tril = torch.zeros(self.K, self.M, self.M)
        for k in range(self.K):
            weights[k] = np.sum(labels==k) / len(data_np)
            cov = EmpiricalCovariance(store_precision=False).fit(data_np[labels==k])
            try:
                L = np.linalg.cholesky(cov.covariance_)
            except np.linalg.LinAlgError:
                # Degenerate cluster covariance: fall back to the identity
                L = np.linalg.cholesky(np.eye(self.M))
            scale_tril[k,:,:] = torch.tensor(L)
        self.weights = PyroParam(
            weights.to(self.device),
            constraint=dist.constraints.simplex)
        self.scale_tril = PyroParam(
            scale_tril.to(self.device),
            constraint=constraints.lower_cholesky)
    def guide(self, data):
        # Empty guide: the only latent site, k, is enumerated by TraceEnum_ELBO.
        pass
    def fit_model(self, data, data_val=None, lr=3e-4, mb_size=512,
                  n_epochs=500, verbose=True, early_stopping=True):
        """Minibatch SVI training with optional validation-based early
        stopping (patience 10 epochs, 0.1% relative improvement)."""
        N_train = len(data)
        if data_val is not None:
            N_val = len(data_val)
        else:
            # No validation set: early stopping is impossible
            early_stopping = False
        # Variables for early-stopping
        best_loss = np.inf
        count = 0
        adam = pyro.optim.Adam({"lr": lr})
        svi = SVI(self, self.guide, adam, loss=TraceEnum_ELBO())
        for epoch in range(n_epochs):
            self.train()
            mbs = BatchSampler(
                RandomSampler(range(N_train)),
                batch_size=mb_size,
                drop_last=False)
            loss = 0
            for mb_idx in mbs:
                loss += svi.step(data[mb_idx])
            self.train_losses.append(loss/N_train)
            if data_val is not None:
                self.eval()
                val_loss = self.nllh(data_val) / N_val
                self.val_losses.append(val_loss)
            if epoch % 10 == 0 and verbose:
                print('[epoch {}] loss: {:.4f}'.format(epoch, loss/N_train))
            if early_stopping:
                # Reset counter if val loss has improved by 0.1%
                if val_loss < best_loss*(1 - 1e-3):
                    best_loss = val_loss
                    count = 0
                else:
                    count += 1
                # Break training loop if val loss has not improved in 10 epochs
                if count == 10:
                    break
    def log_density(self, data):
        """Vectorised per-row mixture log-density (logsumexp over k)."""
        with torch.no_grad():
            N, M = data.shape
            # Broadcast rows against the K component distributions
            data = data.reshape(N, 1, M)
            log_weights = torch.log(self.weights)
            log_prob = dist.MultivariateNormal(
                loc=self.locs,
                scale_tril=self.scale_tril).log_prob(data)
            return torch.logsumexp(log_weights + log_prob, dim=-1)
    def nllh(self, data):
        """Negative log-likelihood of *data* as a Python float."""
        with torch.no_grad():
            log_density = self.log_density(data)
            return -torch.sum(log_density).item()
    def density(self, data):
        """Mixture density per row of *data*, computed in double precision."""
        with torch.no_grad():
            N, M = data.shape
            if M != self.M:
                raise ValueError('Incorrect number of data columns.')
            density = torch.zeros(data.shape[0], device=self.device)
            for k in range(self.K):
                density += self.weights[k]* torch.exp(dist.MultivariateNormal(loc=self.locs[k].double(),
                                                                              scale_tril=self.scale_tril[k].double()).log_prob(data))
            return density
    def log_likelihood(self, data):
        """Sum of log densities over the rows of *data* (tensor)."""
        with torch.no_grad():
            llh = torch.log(self.density(data)).sum()
            return llh
    def eval_density_grid(self, n_points=100, grid=[-5, 5, -5, 5]):
        """Evaluate the density on a 2-D grid; returns ((x_range, y_range), Z)."""
        if self.M != 2:
            raise ValueError('Can only evaluate density grid for 2-dimensional data.')
        x_range = np.linspace(grid[0], grid[1], n_points)
        y_range = np.linspace(grid[2], grid[3], n_points)
        X1, X2 = np.meshgrid(x_range, y_range)
        XX = np.column_stack((X1.ravel(), X2.ravel()))
        densities = self.density(
            torch.tensor(XX, device=self.device)).cpu().numpy()
        return (x_range, y_range), densities.reshape((n_points, n_points))
    def unit_test_alt(self, n_points=100, grid=[-5, 5, -5, 5]):
        """Riemann-sum integral of the 2-D density (should be close to 1)."""
        (x_range, y_range), density = self.eval_density_grid(n_points=n_points,
                                                             grid=grid)
        density = density.sum()
        height = (y_range[-1] - y_range[0]) / len(y_range)
        width = (x_range[-1] - x_range[0]) / len(x_range)
        scaled_density = density * width * height
        return scaled_density
class TensorRing(PyroModule):
    """Tensor Ring model.

    A chain of Gaussian mixtures whose latent indices k_1..k_{M-1} follow a
    Markov chain; the last observed variable is conditioned on the initial
    index k_0 again, closing the ring.  (The original docstring said
    "Tensor Train", but the forward pass ties x_M back to k_0.)
    """
    def __init__(self, Ks, device='cpu'):
        super().__init__()
        self.Ks = Ks
        self.device = device
        self.kwargs = {'Ks': Ks, 'device': device}
        self.M = len(self.Ks) - 1
        self.train_losses = list()
        self.val_losses = list()
        # Weights for latent variable k_0
        self.k0_weights = PyroParam(
            torch.ones(self.Ks[0], device=self.device) / self.Ks[0],
            constraint=dist.constraints.simplex)
        # Parameters indexed by latent variable number, i.e. by 1, 2, ..., M
        self.params = nn.ModuleDict()
        # Initialize weights, locs and scales
        self.init_params()
        if 'cuda' in self.device:
            self.cuda(self.device)
        # For long chains use the pyro.markov variant to bound enumeration
        if len(self.Ks) > 60:
            self.forward = self.forward_alt
    def init_params(self, loc_min=None, loc_max=None, scale_max=None):
        """(Re-)initialize per-variable weights, locs and scales.

        loc_min / loc_max / scale_max are per-dimension sequences bounding
        the uniform initialization; defaults are [-1, 1] locs and (0, 1]
        scales for every dimension.
        """
        if loc_min is None:
            loc_min = [-1 for m in range(self.M)]
        if loc_max is None:
            loc_max = [1 for m in range(self.M)]
        if scale_max is None:
            scale_max = [1 for m in range(self.M)]
        # Initialise parameters for k_{1} to k_{M-1}. k_{M} is treated separately.
        for m in range(1, self.M):
            # PyroModule for storing weights, locs and scales for x_m
            module = PyroModule(name=f'x_{m}')
            param_shape = (self.Ks[m-1], self.Ks[m])
            # Weight matrix W^m ("transition probabilities"),
            # i.e. probability of k_m = a given k_{m-1} = b
            module.weights = PyroParam(
                torch.ones(param_shape, device=self.device) / self.Ks[m],
                constraint=constraints.simplex)
            # Locs for x_m
            module.locs = PyroParam(
                dist.Uniform(loc_min[m-1]-1e-8, loc_max[m-1]+1e-8).sample(
                    param_shape).to(self.device),
                constraint=constraints.real)
            # Scales for x_m
            module.scales = PyroParam(
                dist.Uniform(1e-8, scale_max[m-1]+1e-7).sample(
                    param_shape).to(self.device),
                constraint=constraints.positive)
            self.params[str(m)] = module
        # Initialise parameters for k_{M} (no weights: its index is k_0)
        module = PyroModule(name=f'x_{self.M}')
        param_shape = (self.Ks[self.M-1], self.Ks[self.M])
        # Locs for x_M
        module.locs = PyroParam(
            dist.Uniform(loc_min[self.M-1]-1e-8, loc_max[self.M-1]+1e-8).sample(
                param_shape).to(self.device),
            constraint=constraints.real)
        # Scales for x_M
        module.scales = PyroParam(
            dist.Uniform(1e-8, scale_max[self.M-1]+1e-7).sample(
                param_shape).to(self.device),
            constraint=constraints.positive)
        self.params[str(self.M)] = module
    @config_enumerate
    def forward(self, data=None, n_samples=1000):
        """Pyro model; generates samples or conditions on *data*."""
        N, M = data.shape if data is not None else (n_samples, self.M)
        # Empty tensor for data samples
        x_sample = torch.empty(N, M, device=self.device)
        with pyro.plate('data', size=N):
            # Sample k_0
            k_m_prev = pyro.sample(
                'k_0',
                dist.Categorical(self.k0_weights))
            # Remember k_0: the last variable closes the ring on it
            k_init = k_m_prev
            for m, params in enumerate(self.params.values()):
                # Observations of x_m
                obs = data[:, m] if data is not None else None
                if m == self.M-1:
                    # Close the ring: condition x_M on k_0
                    k_m = k_init
                else:
                    # Sample k_m
                    k_m = pyro.sample(
                        f'k_{m + 1}',
                        dist.Categorical(Vindex(params.weights)[k_m_prev]))
                # Sample x_m
                x_sample[:, m] = pyro.sample(
                    f'x_{m + 1}',
                    dist.Normal(loc=Vindex(params.locs)[k_m_prev, k_m],
                                scale=Vindex(params.scales)[k_m_prev, k_m]),
                    obs=obs)
                k_m_prev = k_m
        return x_sample
    @config_enumerate
    def forward_alt(self, data=None, n_samples=1000):
        """Like forward(), but wraps the chain in pyro.markov so long
        chains do not blow up the enumeration dimension."""
        N, M = data.shape if data is not None else (n_samples, self.M)
        # Empty tensor for data samples
        x_sample = torch.empty(N, M, device=self.device)
        with pyro.plate('data', size=N):
            # Sample k_0
            k_m_prev = pyro.sample(
                'k_0',
                dist.Categorical(self.k0_weights))
            # BUG FIX: k_init was never assigned in this variant, so the
            # last step raised NameError; mirror forward() and remember k_0.
            k_init = k_m_prev
            for m in pyro.markov(range(self.M)):
                params = self.params[str(m + 1)]
                # Observations of x_m
                obs = data[:, m] if data is not None else None
                if m == self.M-1:
                    # Close the ring: condition x_M on k_0
                    x_sample[:, m] = pyro.sample(
                        f'x_{m + 1}',
                        dist.Normal(loc=Vindex(params.locs)[k_m_prev, k_init],
                                    scale=Vindex(params.scales)[k_m_prev, k_init]),
                        obs=obs)
                else:
                    # Sample k_m
                    k_m = pyro.sample(
                        f'k_{m + 1}',
                        dist.Categorical(Vindex(params.weights)[k_m_prev]))
                    # Sample x_m
                    x_sample[:, m] = pyro.sample(
                        f'x_{m + 1}',
                        dist.Normal(loc=Vindex(params.locs)[k_m_prev, k_m],
                                    scale=Vindex(params.scales)[k_m_prev, k_m]),
                        obs=obs)
                    k_m_prev = k_m
        return x_sample
    def guide(self, data):
        # Empty guide: all latent sites are enumerated by TraceEnum_ELBO.
        pass
    def fit_model(self, data, data_val=None, lr=3e-4, mb_size=512,
                  n_epochs=500, verbose=True):
        """Minibatch SVI training; records per-epoch train losses and,
        when data_val is given, validation ELBO losses."""
        N_train = len(data)
        if data_val is not None:
            N_val = len(data_val)
        adam = pyro.optim.Adam({"lr": lr})
        svi = SVI(self, self.guide, adam, loss=TraceEnum_ELBO())
        for epoch in range(n_epochs):
            self.train()
            mbs = BatchSampler(
                RandomSampler(range(N_train)),
                batch_size=mb_size,
                drop_last=False)
            loss = 0
            for mb_idx in mbs:
                loss += svi.step(data[mb_idx])
            self.train_losses.append(loss/N_train)
            if data_val is not None:
                self.eval()
                self.val_losses.append(svi.evaluate_loss(data_val)/N_val)
            # NOTE(review): epoch == n_epochs never holds inside range(n_epochs);
            # kept as-is to preserve the original printing behavior.
            if (epoch % 1000 == 0 or epoch == n_epochs) and verbose:
                print('[epoch {}] loss: {:.4f}'.format(epoch, loss/N_train))
    def hot_start(self, data, subsample_size=None, n_starts=100):
        """Try n_starts random seeds, keep the one with the lowest initial
        loss, and leave the model initialized from that seed."""
        seeds = torch.multinomial(
            torch.ones(10000) / 10000, num_samples=n_starts)
        inits = list()
        data_min = data.min(dim=0).values
        data_max = data.max(dim=0).values
        data_std = data.std(dim=0)
        if subsample_size is not None:
            subsample_idx = torch.randperm(len(data))[:subsample_size]
            data = data[subsample_idx]
        for seed in seeds:
            pyro.set_rng_seed(seed)
            pyro.clear_param_store()
            # Set new initial parameters
            self.init_params(loc_min=data_min,
                             loc_max=data_max,
                             scale_max=data_std)
            # Get initial loss (single zero-lr epoch over the full batch)
            self.fit_model(
                data, lr=0, mb_size=len(data), n_epochs=1, verbose=False)
            loss = self.train_losses[-1]
            # Save loss and seed
            inits.append((loss, seed))
            # Reset train losses
            self.train_losses = list()
        # Best initialization
        _, best_seed = min(inits)
        # Initialize with best seed
        pyro.set_rng_seed(best_seed)
        pyro.clear_param_store()
        self.init_params(loc_min=data_min,
                         loc_max=data_max,
                         scale_max=data_std)
    def density(self, data):
        """Evaluate the ring density per row by contracting factors in
        reverse order; the last two factors (which share the ring index)
        are combined in one step."""
        with torch.no_grad():
            N, M = data.shape
            # Intialize sum to neutral multiplier
            sum_ = torch.ones(1, 1, device=self.device)
            # Iterate in reverse order (to sum last latent variables first)
            for m, params in reversed(self.params.items()):
                # Modules are indexed by 1,2,...,M but data by 0,1,...,M-1
                m = int(m) - 1
                if m == self.M-1:
                    # Defer the ring-closing factor to the next step
                    end_params = params
                    continue
                elif m == self.M-2:
                    # Combine x_{M-1} and x_M factors over the shared index
                    logprobs = (dist.Normal(
                        loc=params.locs,
                        scale=params.scales).log_prob(data[:, m].reshape(-1, 1, 1)))
                    logprobs_prev = (dist.Normal(
                        loc=end_params.locs,
                        scale=end_params.scales).log_prob(data[:, m+1].reshape(-1, 1, 1)))
                    sum_ = (params.weights * torch.exp(logprobs + logprobs_prev) * sum_.unsqueeze(1)).sum(-1)
                else:
                    probs = torch.exp(dist.Normal(
                        loc=params.locs,
                        scale=params.scales).log_prob(data[:, m].reshape(-1, 1, 1)))
                    sum_ = (params.weights * probs * sum_.unsqueeze(1)).sum(-1)
            density = (self.k0_weights * sum_).sum(-1)
            return density
    def log_likelihood(self, data):
        """Sum of log densities over the rows of *data* (tensor)."""
        with torch.no_grad():
            llh = (torch.log(self.density(data))).sum()
            return llh
    def eval_density_grid(self, n_points=100, grid=[-5, 5, -5, 5]):
        """Evaluate the density on a 2-D grid; returns ((x_range, y_range), Z)."""
        x_range = np.linspace(grid[0], grid[1], n_points)
        y_range = np.linspace(grid[2], grid[3], n_points)
        X1, X2 = np.meshgrid(x_range, y_range)
        XX = np.column_stack((X1.ravel(), X2.ravel()))
        densities = self.density(
            torch.tensor(XX, device=self.device)).cpu().numpy()
        return (x_range, y_range), densities.reshape((n_points, n_points))
    def unit_test(self, int_limits, opts=None):
        """Integrate the probability density over *int_limits* via nquad
        (mutable default ``dict()`` replaced with ``None``; same behavior)."""
        if opts is None:
            opts = dict()
        return nquad(
            lambda *args: self.density(
                torch.tensor([args], device=self.device)).item(),
            int_limits, opts=opts)
    def unit_test_alt(self, n_points=100, grid=[-5, 5, -5, 5]):
        """Riemann-sum integral of the 2-D density (should be close to 1)."""
        (x_range, y_range), density = self.eval_density_grid(n_points=n_points,
                                                             grid=grid)
        density = density.sum()
        height = (y_range[-1] - y_range[0])/len(y_range)
        width = (x_range[-1] - x_range[0])/len(x_range)
        # BUG FIX: the original multiplied by the undefined name `height40`
        # (NameError); the intended factor is `height`.
        scaled_density = density*width*height
        return scaled_density
    def eval_density_multi_grid(self, limits, n_points):
        """Evaluate the density on an M-dimensional grid (n_points per dim)."""
        # Reshape limits to one (low, high) row per dimension
        limits = torch.Tensor(limits).reshape(self.M, 2)
        # Allocate interval ranges
        grid_tensor = torch.zeros(self.M, n_points)
        # Fill in interval ranges
        for m, vals in enumerate(limits):
            grid_tensor[m, :] = torch.linspace(vals[0], vals[1], n_points)
        # Create meshgrid of points
        mesh = torch.meshgrid([grid for grid in grid_tensor])
        XX = torch.column_stack([x.ravel() for x in mesh])
        # Evaluate densities (XX is already a tensor: .to() avoids the
        # copy-construct warning torch.tensor() emits on tensor input)
        densities = self.density(XX.to(self.device)).cpu().numpy()
        # Return ranges for M dimensions and all densities
        if self.M == 2:
            return (grid_tensor[0].numpy(), grid_tensor[1].numpy()), densities.reshape((n_points, n_points))
        else:
            return grid_tensor, densities
    def unit_test_multidimensional(self, limits, n_points=1000):
        """Riemann-sum integral of the density over an M-dimensional box."""
        grid_tensor, densities = self.eval_density_multi_grid(limits, n_points)
        total_density = densities.sum()
        if self.M == 2:
            scaling = (grid_tensor[0][-1]-grid_tensor[0][0])*(grid_tensor[1][-1]-grid_tensor[1][0])/(n_points**self.M)
        else:
            scaling = np.prod([(vals[-1]-vals[0]).item() for vals in grid_tensor])/(n_points**self.M)
        scaled_density = total_density * scaling
        return scaled_density
class TensorRingAlt(PyroModule):
    """Tensor Ring model (alternative parameterisation).

    Here M = len(Ks) and the last observed variable x_M has its own
    (Ks[0], Ks[-1])-shaped factor indexed by (k_0, k_{M-1}).
    """
    def __init__(self, Ks, device='cpu'):
        super().__init__()
        self.Ks = Ks
        self.device = device
        self.kwargs = {'Ks': Ks, 'device': device}
        self.M = len(self.Ks)
        self.train_losses = list()
        self.val_losses = list()
        # Weights for latent variable k_0
        self.k0_weights = PyroParam(
            torch.ones(self.Ks[0], device=self.device) / self.Ks[0],
            constraint=dist.constraints.simplex)
        # Parameters indexed by latent variable number, i.e. by 1, 2, ..., M
        self.params = nn.ModuleDict()
        # Initialize weights, locs and scales
        self.init_params()
        if 'cuda' in self.device:
            self.cuda(self.device)
        # BUG FIX: the original unconditionally rebound self.forward_alt,
        # which this class does not define (AttributeError for len(Ks) > 60);
        # only rebind when a subclass actually provides forward_alt.
        if len(self.Ks) > 60 and hasattr(self, 'forward_alt'):
            self.forward = self.forward_alt
    def init_params(self, loc_min=None, loc_max=None, scale_max=None):
        """(Re-)initialize per-variable weights, locs and scales.

        loc_min / loc_max / scale_max are per-dimension sequences bounding
        the uniform initialization; defaults are [-1, 1] locs and (0, 1]
        scales for every dimension.
        """
        if loc_min is None:
            loc_min = [-1 for m in range(self.M)]
        if loc_max is None:
            loc_max = [1 for m in range(self.M)]
        if scale_max is None:
            scale_max = [1 for m in range(self.M)]
        # Initialize parameters for x_1, x_2,..., x_{M-1}
        for m in range(1, self.M):
            # PyroModule for storing weights, locs and scales for x_m
            module = PyroModule(name=f'x_{m}')
            param_shape = (self.Ks[m-1], self.Ks[m])
            # Weight matrix W^m ("transition probabilities"),
            # i.e. probability of k_m = a given k_{m-1} = b
            module.weights = PyroParam(
                torch.ones(param_shape, device=self.device) / self.Ks[m],
                constraint=constraints.simplex)
            # Locs for x_m
            module.locs = PyroParam(
                dist.Uniform(loc_min[m-1]-1e-8, loc_max[m-1]+1e-8).sample(
                    param_shape).to(self.device),
                constraint=constraints.real)
            # Scales for x_m
            module.scales = PyroParam(
                dist.Uniform(1e-8, scale_max[m-1]+1e-7).sample(
                    param_shape).to(self.device),
                constraint=constraints.positive)
            self.params[str(m)] = module
        # Initialize parameters for x_M.
        # BUG FIX: the original indexed loc_min/loc_max/scale_max with the
        # loop variable m leaked from above (m = self.M-1, so index self.M-2),
        # giving x_M the limits of dimension M-1 — and raising NameError when
        # self.M == 1.  Use the last dimension's own limits, self.M-1, as the
        # parallel TensorRing.init_params does.
        module = PyroModule(name=f'x_{self.M}')
        param_shape = (self.Ks[0], self.Ks[-1])
        # Locs for x_M
        module.locs = PyroParam(
            dist.Uniform(loc_min[self.M-1]-1e-8, loc_max[self.M-1]+1e-8).sample(
                param_shape).to(self.device),
            constraint=constraints.real)
        # Scales for x_M
        module.scales = PyroParam(
            dist.Uniform(1e-8, scale_max[self.M-1]+1e-7).sample(
                param_shape).to(self.device),
            constraint=constraints.positive)
        self.params[str(self.M)] = module
    @config_enumerate
    def forward(self, data=None, n_samples=1000):
        """Pyro model; generates samples or conditions on *data*.
        The last variable x_M is indexed by (k_0, k_{M-1}), closing the ring."""
        N, M = data.shape if data is not None else (n_samples, self.M)
        # Empty tensor for data samples
        x_sample = torch.empty(N, M, device=self.device)
        with pyro.plate('data', size=N):
            # Sample k_0
            k_0 = pyro.sample(
                'k_0',
                dist.Categorical(self.k0_weights))
            k_m_prev = k_0
            for m, params in enumerate(self.params.values()):
                # Sample k_m
                if m < self.M - 1:
                    k_m = pyro.sample(
                        f'k_{m + 1}',
                        dist.Categorical(Vindex(params.weights)[k_m_prev]))
                else:
                    # Last step: index the x_M factor by (k_0, k_{M-1})
                    k_m = k_m_prev
                    k_m_prev = k_0
                # Observations of x_m
                obs = data[:, m] if data is not None else None
                # Sample x_m (small epsilon keeps the scale strictly positive)
                x_sample[:, m] = pyro.sample(
                    f'x_{m + 1}',
                    dist.Normal(loc=Vindex(params.locs)[k_m_prev, k_m],
                                scale=Vindex(params.scales)[k_m_prev, k_m]+1e-9),
                    obs=obs)
                k_m_prev = k_m
        return x_sample
    def guide(self, data):
        # Empty guide: all latent sites are enumerated by TraceEnum_ELBO.
        pass
    def fit_model(self, data, data_val=None, lr=3e-4, mb_size=512,
                  n_epochs=500, verbose=True, early_stopping=False):
        """Minibatch SVI training with optional validation-based early
        stopping (patience 10 epochs, 0.1% relative improvement)."""
        N_train = len(data)
        if data_val is not None:
            N_val = len(data_val)
        else:
            # No validation set: early stopping is impossible
            early_stopping = False
        # Variables for early-stopping
        best_loss = np.inf
        count = 0
        adam = pyro.optim.Adam({"lr": lr})
        svi = SVI(self, self.guide, adam, loss=TraceEnum_ELBO())
        for epoch in range(n_epochs):
            self.train()
            mbs = BatchSampler(
                RandomSampler(range(N_train)),
                batch_size=mb_size,
                drop_last=False)
            loss = 0
            for mb_idx in mbs:
                loss += svi.step(data[mb_idx])
            self.train_losses.append(loss/N_train)
            if data_val is not None:
                self.eval()
                val_loss = svi.evaluate_loss(data_val) / N_val
                self.val_losses.append(val_loss)
            if epoch % 10 == 0 and verbose:
                print('[epoch {}] loss: {:.4f}'.format(epoch, loss/N_train))
            if early_stopping:
                # Reset counter if val loss has improved by 0.1%
                if val_loss < best_loss*(1 - 1e-3):
                    best_loss = val_loss
                    count = 0
                else:
                    count += 1
                # Break training loop if val loss has not improved in 10 epochs
                if count == 10:
                    break
    def hot_start(self, data, subsample_size=None, n_starts=100):
        """Try n_starts random seeds, keep the one with the lowest initial
        loss, and leave the model initialized from that seed."""
        seeds = torch.multinomial(
            torch.ones(10000) / 10000, num_samples=n_starts)
        inits = list()
        data_min = data.min(dim=0).values
        data_max = data.max(dim=0).values
        data_std = data.std(dim=0)
        if subsample_size is not None:
            subsample_idx = torch.randperm(len(data))[:subsample_size]
            data = data[subsample_idx]
        for seed in seeds:
            pyro.set_rng_seed(seed)
            pyro.clear_param_store()
            # Set new initial parameters
            self.init_params(loc_min=data_min,
                             loc_max=data_max,
                             scale_max=data_std)
            # Get initial loss (single zero-lr epoch over the full batch)
            self.fit_model(
                data, lr=0, mb_size=len(data), n_epochs=1, verbose=False)
            loss = self.train_losses[-1]
            # Save loss and seed
            inits.append((loss, seed))
            # Reset train losses
            self.train_losses = list()
        # Best initialization
        _, best_seed = min(inits)
        # Initialize with best seed
        pyro.set_rng_seed(best_seed)
        pyro.clear_param_store()
        self.init_params(loc_min=data_min,
                         loc_max=data_max,
                         scale_max=data_std)
    def log_density(self, data):
        """Per-row log density via a logsumexp contraction of the ring,
        starting from the x_M factor and folding in x_{M-1}..x_1."""
        with torch.no_grad():
            N, M = data.shape
            data = data.reshape(N, M, 1, 1) # N x M x 1 x 1
            # Initialize log density with the ring-closing x_M factor
            locs = self.params[str(self.M)].locs
            scales = self.params[str(self.M)].scales
            log_density = dist.Normal(locs, scales).log_prob(data[:, M-1]) # N x K_0 x K_{M-1}
            log_density = log_density.unsqueeze(2) # N x K_0 x 1 x K_{M-1}
            for m in reversed(range(self.M-1)):
                params = self.params[str(m+1)]
                log_prob = dist.Normal(
                    params.locs, params.scales).log_prob(data[:, m]) # N x K_{m-1} x K_m
                log_weights = torch.log(params.weights) # 1 x K_{m-1} x K_m
                weighted_log_prob = (log_weights + log_prob).unsqueeze(1) # N x 1 x K_{m-1} x K_m
                if m == 0:
                    # First factor: align the K_0 axis before the contraction
                    weighted_log_prob = weighted_log_prob.permute(0, 2, 1, 3) # N x K_{m-1} x 1 x K_m
                log_density = torch.logsumexp(
                    weighted_log_prob + log_density, dim=-1).unsqueeze(2) # N x K_0 x 1 x K_{m-1}
            log_density = log_density.squeeze(-1).squeeze(-1) # N x K_0
            log_k0_weights = torch.log(self.k0_weights) # K_0
            return torch.logsumexp(log_k0_weights + log_density, dim=-1) # N
    def nllh(self, data):
        """Negative log-likelihood of *data* as a Python float."""
        with torch.no_grad():
            log_density = self.log_density(data)
            return -torch.sum(log_density).item()
    def eval_density_grid(self, n_points=100, grid=[-5, 5, -5, 5]):
        """Evaluate the density on a 2-D grid; returns ((x_range, y_range), Z)."""
        x_range = np.linspace(grid[0], grid[1], n_points)
        y_range = np.linspace(grid[2], grid[3], n_points)
        X1, X2 = np.meshgrid(x_range, y_range)
        XX = np.column_stack((X1.ravel(), X2.ravel()))
        densities = self.log_density(
            torch.tensor(XX, device=self.device)).cpu()
        densities = torch.exp(densities).numpy()
        return (x_range, y_range), densities.reshape((n_points, n_points))
    def unit_test_alt(self, n_points=100, grid=[-5, 5, -5, 5]):
        """Riemann-sum integral of the 2-D density (should be close to 1)."""
        (x_range, y_range), density = self.eval_density_grid(n_points=n_points,
                                                             grid=grid)
        density = density.sum()
        height = (y_range[-1] - y_range[0])/len(y_range)
        width = (x_range[-1] - x_range[0])/len(x_range)
        scaled_density = density*width*height
        return scaled_density
| 28.93982
| 111
| 0.64845
| 7,923
| 51,455
| 4.052505
| 0.046321
| 0.015417
| 0.026411
| 0.022206
| 0.900025
| 0.886134
| 0.874953
| 0.856453
| 0.841971
| 0.834621
| 0
| 0.018969
| 0.216247
| 51,455
| 1,778
| 112
| 28.93982
| 0.777202
| 0.107958
| 0
| 0.859428
| 0
| 0
| 0.017942
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.073232
| false
| 0.005892
| 0.011785
| 0.000842
| 0.137205
| 0.005892
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
258f41311b6b278cd9cc4d092656fda2614383af
| 71,204
|
py
|
Python
|
src/menten_gcn/published/maguire_grattarola_2021.py
|
MentenAI/menten_gcn
|
bcc7642cb32ab4e60a97687de17c1aa2dc4b5421
|
[
"MIT"
] | 11
|
2020-12-15T15:36:47.000Z
|
2022-03-10T19:23:36.000Z
|
src/menten_gcn/published/maguire_grattarola_2021.py
|
MentenAI/menten_gcn
|
bcc7642cb32ab4e60a97687de17c1aa2dc4b5421
|
[
"MIT"
] | 2
|
2021-01-14T15:04:59.000Z
|
2021-01-14T19:24:00.000Z
|
src/menten_gcn/published/maguire_grattarola_2021.py
|
MentenAI/menten_gcn
|
bcc7642cb32ab4e60a97687de17c1aa2dc4b5421
|
[
"MIT"
] | null | null | null |
from menten_gcn import DataMaker
import menten_gcn.decorators as decs
import numpy as np
class Maguire_Grattarola_2021(DataMaker):
def __init__(self):
    """Configure the DataMaker used in Maguire & Grattarola (2021).

    Fixed decorator set, 15 A edge cutoff, 30-residue cap, and float32
    dtype; presumably chosen to reproduce the published setup -- confirm
    against the paper before changing any value.
    """
    decorators = [decs.Sequence(), decs.CACA_dist(), decs.PhiPsiRadians(),
                  decs.ChiAngleDecorator(), decs.trRosettaEdges(),
                  decs.SequenceSeparation(), decs.RosettaJumpDecorator(rottype="euler"),
                  decs.RosettaHBondDecorator(),
                  decs.AbbreviatedRef2015Decorator_v0()]
    DataMaker.__init__(self, decorators=decorators,
                       edge_distance_cutoff_A=15.0,
                       max_residues=30,
                       exclude_bbdec=False,
                       nbr_distance_cutoff_A=100,
                       dtype=np.float32)
def run_consistency_check(self):
N, F, S = self.get_N_F_S()
print(N, F, S)
assert N == 30
assert F == 46
assert S == 28
try:
from pyrosetta import pose_from_sequence
from menten_gcn import RosettaPoseWrapper
pose = pose_from_sequence("MENTENAI")
wrapped_pose = RosettaPoseWrapper(pose)
X, A, E, meta = self.generate_input_for_resid(wrapped_pose, 1)
def test(A, B):
np.testing.assert_array_equal(A, B, 3)
'''
import sys
np.set_printoptions(threshold=sys.maxsize)
print( repr( X ) )
print( repr( A ) )
print( repr( E ) )
print( repr( meta ) )
'''
test(meta, [1, 2, 3, 4, 5, 6, 7, 8])
expected_X = np.array([[1.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 1.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 3.1415927e+00, 3.1415927e+00, 8.3775802e-07,
1.0000000e+00, 5.2359877e-07, 1.0000000e+00, -5.7595867e-07,
1.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 1.8672367e+03, 9.1478378e-01,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
2.6757073e-01, 3.2645603e+01, 0.0000000e+00, 0.0000000e+00,
1.6573499e+00, 0.0000000e+00],
[0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
1.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 3.1415927e+00, 3.1415927e+00, -2.2689281e-07,
1.0000000e+00, -5.2359876e-08, 1.0000000e+00, -1.3962634e-07,
1.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 5.3682019e+02, 2.2368712e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
2.6757073e-01, 3.9272125e+01, 1.9425745e+00, 0.0000000e+00,
-2.7245300e+00, 0.0000000e+00],
[0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
1.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 3.1415927e+00, 3.1415927e+00, 6.6141240e-16,
1.0000000e+00, -1.2602073e-15, 1.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 1.0468760e+01, 3.8885200e-01,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
2.6757073e-01, 2.6608192e+01, 6.7491651e-01, 0.0000000e+00,
-1.3402600e+00, 0.0000000e+00],
[0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 1.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 3.1415927e+00, 3.1415927e+00, -1.3439035e-06,
1.0000000e+00, 5.9341193e-07, 1.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 2.5641568e+00, 4.0509410e-02,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
2.6757073e-01, 5.9773567e+01, 1.7684269e+00, 0.0000000e+00,
1.1517500e+00, 0.0000000e+00],
[0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
1.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 3.1415927e+00, 3.1415927e+00, -2.2689281e-07,
1.0000000e+00, -5.2359876e-08, 1.0000000e+00, -1.3962634e-07,
1.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 5.3682019e+02, 2.2368712e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
2.6757073e-01, 3.9272125e+01, 1.9425745e+00, 0.0000000e+00,
-2.7245300e+00, 0.0000000e+00],
[0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
1.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 3.1415927e+00, 3.1415927e+00, -2.7057779e-16,
1.0000000e+00, 3.2313005e-16, 1.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 1.0468760e+01, 3.8885200e-01,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
2.6757073e-01, 3.4736256e+01, 6.7491651e-01, 0.0000000e+00,
-1.3402600e+00, 0.0000000e+00],
[0.0000000e+00, 1.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 3.1415927e+00, 3.1415927e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 2.5678021e-01, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
2.6757073e-01, 0.0000000e+00, 7.2029513e-01, 0.0000000e+00,
1.3246800e+00, 0.0000000e+00],
[0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
1.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 3.1415927e+00, 3.1415927e+00, 1.9198622e-07,
1.0000000e+00, 1.1344640e-06, 1.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 2.9560727e+02, 3.5983735e-01,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 6.2573467e+01, 0.0000000e+00, 0.0000000e+00,
2.3037400e+00, 0.0000000e+00],
[0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00],
[0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00],
[0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00],
[0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00],
[0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00],
[0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00],
[0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00],
[0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00],
[0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00],
[0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00],
[0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00],
[0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00],
[0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00],
[0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00],
[0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00],
[0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00],
[0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00],
[0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00],
[0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00],
[0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00],
[0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00],
[0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00]], dtype=np.float32)
expected_E = np.array([[np.zeros(shape=(S)),
[1.00000000e+00, 3.80209351e+00, 4.73696232e+00,
2.55590057e+00, 1.94406962e+00, 3.78623873e-01, 0, 2.52958393e+00, 2.83850670e+00,
1.11976581e-17, 3.14159274e+00, 2.22044605e-16,
1.10479271e+00, 0, 0, 0, 0, 0,
-4.57205915e+00, 8.80855896e+02, 7.82728672e+00,
3.35431993e-01, -1.00192909e+01, 0, 0, 0, 0, 0],
[0, 7.28560734e+00, 7.28560686e+00,
1.16697186e-06, 2.57872105e+00, 9.72977459e-01,
6.93147182e-01, 6.20201683e+00, 3.82296467e+00,
6.42421180e-16, -5.30027373e-16, -2.22044605e-16,
-1.79768915e-06, 0, 0, 0, 0, 0,
-1.92229003e-01, 0, 2.19786674e-01,
-5.78640960e-02, -3.56009841e-01, 0, 0, 0, 0, 0],
[0, 1.09825363e+01, 1.13401499e+01,
3.00454664e+00, 2.43993378e+00, 7.23135471e-01,
1.09861231e+00, 8.73160267e+00, 6.66147280e+00,
-2.44219418e-17, 3.14159274e+00, 4.44089210e-16,
1.10479355e+00, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1.45712175e+01, 1.45712175e+01,
9.06385296e-08, 2.57872105e+00, 9.72977459e-01,
1.38629436e+00, 1.24040346e+01, 7.64593363e+00,
1.28483897e-15, -1.06005580e-15, -6.66133815e-16,
-1.39626337e-07, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S))],
[[1.00000000e+00, 3.80209351e+00, 4.73696232e+00,
2.55590057e+00, -3.41464520e-01, 1.50724781e+00, 0, -3.67243147e+00, -9.84460473e-01,
4.68176749e-17, -3.14159274e+00, -2.22044605e-16,
1.10479271e+00, 0, 0, 0, 0, 0,
-4.57205915e+00, 8.80855896e+02, 7.82728672e+00,
3.35431993e-01, -1.00192909e+01, 0, 0, 0, 0, 0],
np.zeros(shape=(S)),
[1.00000000e+00, 3.80209446e+00, 4.73696232e+00,
2.55590081e+00, 1.94406879e+00, 3.78623903e-01, 0, 2.52958250e+00, 2.83850908e+00,
1.11970799e-17, 3.14159274e+00, 2.22044605e-16,
1.10479450e+00, 0, 0, 0, 0, 0,
-2.22846270e+00, 2.04876663e+02, 2.69857574e+00,
-1.21587858e-01, -1.10309076e+00, 0, 0, 0, 0, 0],
[0, 7.28560972e+00, 7.28560925e+00,
5.55161478e-07, 2.57872009e+00, 9.72977221e-01,
6.93147182e-01, 6.20201588e+00, 3.82297087e+00,
6.42420386e-16, -5.30027532e-16, -2.22044605e-16,
-8.55211340e-07, 0, 0, 0, 0, 0,
-7.39678815e-02, 0, 1.09441765e-01,
-1.31108221e-02, 1.17330207e-02, 0, 0, 0, 0, 0],
[0, 1.09825373e+01, 1.13401508e+01,
3.00454545e+00, 2.43993306e+00, 7.23135352e-01,
1.09861231e+00, 8.73160076e+00, 6.66147757e+00,
-2.44209194e-17, 3.14159274e+00, 4.44089210e-16,
1.10479283e+00, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1.45712175e+01, 1.45712166e+01,
1.07633355e-06, 2.57872057e+00, 9.72977340e-01,
1.38629436e+00, 1.24040337e+01, 7.64593601e+00,
1.28484437e-15, -1.06005464e-15, -6.66133815e-16,
-1.65806284e-06, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)),
np.zeros(shape=(S))],
[[0, 7.28560734e+00, 7.28560686e+00,
1.16697186e-06, -5.62873721e-01, 2.16861582e+00,
6.93147182e-01, -6.20201015e+00, -3.82297587e+00,
-6.42420810e-16, 5.30027955e-16, 4.44089210e-16,
1.79768915e-06, 0, 0, 0, 0, 0,
-1.92229003e-01, 0, 2.19786674e-01,
-5.78640960e-02, -3.56009841e-01, 0, 0, 0, 0, 0],
[1.00000000e+00, 3.80209446e+00, 4.73696232e+00,
2.55590081e+00, -3.41465622e-01, 1.50724840e+00, 0, -3.67243123e+00, -9.84464645e-01,
4.68172051e-17, -3.14159274e+00, -2.22044605e-16,
1.10479450e+00, 0, 0, 0, 0, 0,
-2.22846270e+00, 2.04876663e+02, 2.69857574e+00,
-1.21587858e-01, -1.10309076e+00, 0, 0, 0, 0, 0],
np.zeros(shape=(S)),
[1.00000000e+00, 3.80209589e+00, 4.73696327e+00,
2.55589986e+00, 1.94406736e+00, 3.78624082e-01, 0, 2.52958083e+00, 2.83851242e+00,
1.11972230e-17, 3.14159274e+00, 2.22044605e-16,
1.10479534e+00, 0, 0, 0, 0, 0,
-1.31169045e+00, 6.74720097e+00, 1.67359757e+00,
1.66524708e-01, 1.47069490e+00, 0, 0, 0, 0, 0],
[0, 7.28561020e+00, 7.28561068e+00,
-1.07633332e-06, 2.57871890e+00, 9.72976923e-01,
6.93147182e-01, 6.20201063e+00, 3.82297993e+00,
6.42419380e-16, -5.30027955e-16, -2.22044605e-16,
1.65806284e-06, 0, 0, 0, 0, 0,
-6.20821118e-01, 0, 8.71631145e-01,
1.06285838e-03, -7.25033402e-01, 0, 0, 0, 0, 0],
[0, 1.09825373e+01, 1.13401508e+01,
3.00454521e+00, 2.43993092e+00, 7.23134995e-01,
1.09861231e+00, 8.73158836e+00, 6.66149330e+00,
-2.44247823e-17, 3.14159274e+00, 4.44089210e-16,
1.10479617e+00, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1.45712204e+01, 1.45712204e+01,
-8.27077542e-07, 2.57871866e+00, 9.72976804e-01,
1.38629436e+00, 1.24040203e+01, 7.64596176e+00,
1.28483802e-15, -1.06005549e-15, -6.66133815e-16,
1.27409032e-06, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S))],
[[0, 1.09825363e+01, 1.13401499e+01,
3.00454664e+00, -4.65556115e-01, 1.91248298e+00,
1.09861231e+00, -9.87444305e+00, -4.80743980e+00,
8.24374220e-17, -3.14159274e+00, -4.44089210e-16,
1.10479355e+00, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 7.28560972e+00, 7.28560925e+00,
5.55161478e-07, -5.62873423e-01, 2.16861558e+00,
6.93147182e-01, -6.20201254e+00, -3.82297611e+00,
-6.42420704e-16, 5.30027796e-16, 4.44089210e-16,
8.55211340e-07, 0, 0, 0, 0, 0,
-7.39678815e-02, 0, 1.09441765e-01,
-1.31108221e-02, 1.17330207e-02, 0, 0, 0, 0, 0],
[1.00000000e+00, 3.80209589e+00, 4.73696327e+00,
2.55589986e+00, -3.41465712e-01, 1.50724864e+00, 0, -3.67243266e+00, -9.84464884e-01,
4.68171190e-17, -3.14159274e+00, -2.22044605e-16,
1.10479534e+00, 0, 0, 0, 0, 0,
-1.31169045e+00, 6.74720097e+00, 1.67359757e+00,
1.66524708e-01, 1.47069490e+00, 0, 0, 0, 0, 0],
np.zeros(shape=(S)),
[1.00000000e+00, 3.80209374e+00, 4.73696232e+00,
2.55589986e+00, 1.94406843e+00, 3.78623903e-01, 0, 2.52958202e+00, 2.83850884e+00,
1.11975100e-17, 3.14159274e+00, 2.22044605e-16,
1.10479379e+00, 0, 0, 0, 0, 0,
-3.67839885e+00, 8.79898254e+02, 6.06281567e+00,
3.71687233e-01, -4.08100748e+00, 0, 0, 0, 0, 0],
[0, 7.28560781e+00, 7.28560734e+00,
5.21172012e-07, 2.57872009e+00, 9.72977221e-01,
6.93147182e-01, 6.20201397e+00, 3.82297063e+00,
6.42420863e-16, -5.30027426e-16, -2.22044605e-16,
-8.02851446e-07, 0, 0, 0, 0, 0,
-2.02662915e-01, 0, 2.20212236e-01,
-5.84062114e-02, -3.69073659e-01, 0, 0, 0, 0, 0],
[0, 1.09825373e+01, 1.13401508e+01,
3.00454569e+00, 2.43993258e+00, 7.23135293e-01,
1.09861231e+00, 8.73159790e+00, 6.66148043e+00,
-2.44220195e-17, 3.14159274e+00, 4.44089210e-16,
1.10479414e+00, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1.45712185e+01, 1.45712194e+01,
-1.58617510e-07, 2.57872009e+00, 9.72977221e-01,
1.38629436e+00, 1.24040298e+01, 7.64594460e+00,
1.28483982e-15, -1.06005559e-15, -6.66133815e-16,
2.44346097e-07, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)),
np.zeros(shape=(S))],
[[0, 1.45712175e+01, 1.45712175e+01,
9.06385296e-08, -5.62871814e-01, 2.16861534e+00,
1.38629436e+00, -1.24040337e+01, -7.64593506e+00,
-1.28483844e-15, 1.06005591e-15, 6.66133815e-16,
1.39626337e-07, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1.09825373e+01, 1.13401508e+01,
3.00454545e+00, -4.65554893e-01, 1.91248250e+00,
1.09861231e+00, -9.87444973e+00, -4.80742836e+00,
8.24393609e-17, -3.14159274e+00, -4.44089210e-16,
1.10479283e+00, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 7.28561020e+00, 7.28561068e+00,
-1.07633332e-06, -5.62871873e-01, 2.16861534e+00,
6.93147182e-01, -6.20201731e+00, -3.82296968e+00,
-6.42419433e-16, 5.30027373e-16, 4.44089210e-16,
-1.65806284e-06, 0, 0, 0, 0, 0,
-6.20821118e-01, 0, 8.71631145e-01,
1.06285838e-03, -7.25033402e-01, 0, 0, 0, 0, 0],
[1.00000000e+00, 3.80209374e+00, 4.73696232e+00,
2.55589986e+00, -3.41464877e-01, 1.50724792e+00, 0, -3.67243147e+00, -9.84461367e-01,
4.68175161e-17, -3.14159274e+00, -2.22044605e-16,
1.10479379e+00, 0, 0, 0, 0, 0,
-3.67839885e+00, 8.79898254e+02, 6.06281567e+00,
3.71687233e-01, -4.08100748e+00, 0, 0, 0, 0, 0],
np.zeros(shape=(S)),
[1.00000000e+00, 3.80209446e+00, 4.73696232e+00,
2.55590081e+00, 1.94406879e+00, 3.78623903e-01, 0, 2.52958250e+00, 2.83850908e+00,
1.11970799e-17, 3.14159274e+00, 2.22044605e-16,
1.10479450e+00, 0, 0, 0, 0, 0,
-2.22846270e+00, 2.04876663e+02, 2.69857574e+00,
-1.21587858e-01, -1.10309076e+00, 0, 0, 0, 0, 0],
[0, 7.28561020e+00, 7.28560972e+00,
2.49256118e-07, 2.57872009e+00, 9.72977221e-01,
6.93147182e-01, 6.20201588e+00, 3.82297158e+00,
6.42420122e-16, -5.30027690e-16, -2.22044605e-16,
-3.83972434e-07, 0, 0, 0, 0, 0,
-6.45447522e-02, 0, 1.05444595e-01,
-1.05051743e-02, -1.32832214e-01, 0, 0, 0, 0, 0],
[0, 1.09825382e+01, 1.13401518e+01,
3.00454569e+00, 2.43993282e+00, 7.23135352e-01,
1.09861231e+00, 8.73160076e+00, 6.66147947e+00,
-2.44219004e-17, 3.14159274e+00, 4.44089210e-16,
1.10479343e+00, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)),
np.zeros(shape=(S))],
[np.zeros(shape=(S)),
[0, 1.45712175e+01, 1.45712166e+01,
1.07633355e-06, -5.62873960e-01, 2.16861582e+00,
1.38629436e+00, -1.24040203e+01, -7.64595652e+00,
-1.28484427e-15, 1.06005580e-15, 6.66133815e-16,
1.65806284e-06, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1.09825373e+01, 1.13401508e+01,
3.00454521e+00, -4.65556622e-01, 1.91248310e+00,
1.09861231e+00, -9.87444305e+00, -4.80744314e+00,
8.24356883e-17, -3.14159274e+00, -4.44089210e-16,
1.10479617e+00, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 7.28560781e+00, 7.28560734e+00,
5.21172012e-07, -5.62873483e-01, 2.16861558e+00,
6.93147182e-01, -6.20201111e+00, -3.82297564e+00,
-6.42420492e-16, 5.30027690e-16, 4.44089210e-16,
8.02851446e-07, 0, 0, 0, 0, 0,
-2.02662915e-01, 0, 2.20212236e-01,
-5.84062114e-02, -3.69073659e-01, 0, 0, 0, 0, 0],
[1.00000000e+00, 3.80209446e+00, 4.73696232e+00,
2.55590081e+00, -3.41465622e-01, 1.50724840e+00, 0, -3.67243123e+00, -9.84464645e-01,
4.68172051e-17, -3.14159274e+00, -2.22044605e-16,
1.10479450e+00, 0, 0, 0, 0, 0,
-2.22846270e+00, 2.04876663e+02, 2.69857574e+00,
-1.21587858e-01, -1.10309076e+00, 0, 0, 0, 0, 0],
np.zeros(shape=(S)),
[1.00000000e+00, 3.80209589e+00, 4.73696375e+00,
2.55589986e+00, 1.94406760e+00, 3.78624111e-01, 0, 2.52958155e+00, 2.83851218e+00,
1.11973512e-17, 3.14159274e+00, 2.22044605e-16,
1.10479486e+00, 0, 0, 0, 0, 0,
-7.31544256e-01, 1.80469227e+00, 3.98014307e-01,
-1.71806701e-02, -2.58314610e-01, 0, 0, 0, 0, 0],
[0, 7.28561115e+00, 7.28561163e+00,
-6.79789594e-07, 2.57871914e+00, 9.72976923e-01,
6.93147182e-01, 6.20201254e+00, 3.82297897e+00,
6.42419963e-16, -5.30027796e-16, -2.22044605e-16,
1.04719754e-06, 0, 0, 0, 0, 0,
-4.29591179e-01, 0, -7.36617744e-02,
-4.40440699e-02, -9.07294303e-02, 0, 0, 0, 0, 0],
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)),
np.zeros(shape=(S))],
[np.zeros(shape=(S)),
np.zeros(shape=(S)),
[0, 1.45712204e+01, 1.45712204e+01,
-8.27077542e-07, -5.62872589e-01, 2.16861534e+00,
1.38629436e+00, -1.24040298e+01, -7.64594603e+00,
-1.28483876e-15, 1.06005464e-15, 6.66133815e-16,
-1.27409032e-06, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1.09825373e+01, 1.13401508e+01,
3.00454569e+00, -4.65555817e-01, 1.91248286e+00,
1.09861231e+00, -9.87444592e+00, -4.80743694e+00,
8.24373029e-17, -3.14159274e+00, -4.44089210e-16,
1.10479414e+00, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 7.28561020e+00, 7.28560972e+00,
2.49256118e-07, -5.62872946e-01, 2.16861558e+00,
6.93147182e-01, -6.20201445e+00, -3.82297397e+00,
-6.42420439e-16, 5.30027796e-16, 4.44089210e-16,
3.83972434e-07, 0, 0, 0, 0, 0,
-6.45447522e-02, 0, 1.05444595e-01,
-1.05051743e-02, -1.32832214e-01, 0, 0, 0, 0, 0],
[1.00000000e+00, 3.80209589e+00, 4.73696375e+00,
2.55589986e+00, -3.41465414e-01, 1.50724852e+00, 0, -3.67243314e+00, -9.84463811e-01,
4.68172481e-17, -3.14159274e+00, -2.22044605e-16,
1.10479486e+00, 0, 0, 0, 0, 0,
-7.31544256e-01, 1.80469227e+00, 3.98014307e-01,
-1.71806701e-02, -2.58314610e-01, 0, 0, 0, 0, 0],
np.zeros(shape=(S)),
[1.00000000e+00, 3.80209470e+00, 4.73696280e+00,
2.55590081e+00, 1.94406915e+00, 3.78623962e-01, 0, 2.52958345e+00, 2.83850884e+00,
1.11972619e-17, 3.14159274e+00, 2.22044605e-16,
1.10479391e+00, 0, 0, 0, 0, 0,
-2.75101089e+00, 7.84158997e+02, 4.00388569e-01,
2.45321058e-02, -1.18683946e+00, 0, 0, 0, 0, 0],
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)),
np.zeros(shape=(S))],
[np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)),
[0, 1.45712185e+01, 1.45712194e+01,
-1.58617510e-07, -5.62872350e-01, 2.16861534e+00,
1.38629436e+00, -1.24040318e+01, -7.64594173e+00,
-1.28483844e-15, 1.06005538e-15, 6.66133815e-16,
-2.44346097e-07, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1.09825382e+01, 1.13401518e+01,
3.00454569e+00, -4.65555400e-01, 1.91248274e+00,
1.09861231e+00, -9.87444878e+00, -4.80743361e+00,
8.24388779e-17, -3.14159274e+00, -4.44089210e-16,
1.10479343e+00, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 7.28561115e+00, 7.28561163e+00,
-6.79789594e-07, -5.62872410e-01, 2.16861534e+00,
6.93147182e-01, -6.20201635e+00, -3.82297254e+00,
-6.42419645e-16, 5.30027426e-16, 4.44089210e-16,
-1.04719754e-06, 0, 0, 0, 0, 0,
-4.29591179e-01, 0, -7.36617744e-02,
-4.40440699e-02, -9.07294303e-02, 0, 0, 0, 0, 0],
[1.00000000e+00, 3.80209470e+00, 4.73696280e+00,
2.55590081e+00, -3.41465175e-01, 1.50724828e+00, 0, -3.67243195e+00, -9.84463096e-01,
4.68173837e-17, -3.14159274e+00, -2.22044605e-16,
1.10479391e+00, 0, 0, 0, 0, 0,
-2.75101089e+00, 7.84158997e+02, 4.00388569e-01,
2.45321058e-02, -1.18683946e+00, 0, 0, 0, 0, 0],
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S)), np.zeros(shape=(S)),
np.zeros(shape=(S))],
np.zeros(shape=(N, S)), np.zeros(shape=(N, S)),
np.zeros(shape=(N, S)), np.zeros(shape=(N, S)),
np.zeros(shape=(N, S)), np.zeros(shape=(N, S)),
np.zeros(shape=(N, S)), np.zeros(shape=(N, S)),
np.zeros(shape=(N, S)), np.zeros(shape=(N, S)),
np.zeros(shape=(N, S)), np.zeros(shape=(N, S)),
np.zeros(shape=(N, S)), np.zeros(shape=(N, S)),
np.zeros(shape=(N, S)), np.zeros(shape=(N, S)),
np.zeros(shape=(N, S)), np.zeros(shape=(N, S)),
np.zeros(shape=(N, S)), np.zeros(shape=(N, S)),
np.zeros(shape=(N, S)), np.zeros(shape=(N, S))], dtype=np.float32)
expected_A = np.array([[0., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 0., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 0., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 0., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 0., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 1., 1., 1., 1., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 1., 1., 1., 1., 0., 1., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.]],
dtype=np.float32)
test(X, expected_X)
test(A, expected_A)
test(E, expected_E)
except ImportError:
print("Unable to use pyrosetta. Did you call init?")
| 86.412621
| 124
| 0.398194
| 8,674
| 71,204
| 3.263431
| 0.054877
| 0.093616
| 0.496626
| 0.581411
| 0.886459
| 0.873282
| 0.869891
| 0.860353
| 0.847211
| 0.847211
| 0
| 0.575176
| 0.45225
| 71,204
| 823
| 125
| 86.517618
| 0.150608
| 0
| 0
| 0.730818
| 0
| 0
| 0.000789
| 0
| 0
| 0
| 0
| 0
| 0.005031
| 1
| 0.003774
| false
| 0
| 0.007547
| 0
| 0.012579
| 0.002516
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
2599623a11ca9449647b80042e056cbbe3a4b818
| 4,710
|
py
|
Python
|
tests/test_enum_parameters.py
|
Algomorph/ext_argparse
|
fbca26f8a551f84677475a11fb5415ddda78abd9
|
[
"Apache-2.0"
] | 1
|
2021-09-06T23:22:07.000Z
|
2021-09-06T23:22:07.000Z
|
tests/test_enum_parameters.py
|
Algomorph/ext_argparse
|
fbca26f8a551f84677475a11fb5415ddda78abd9
|
[
"Apache-2.0"
] | 11
|
2021-09-07T14:13:39.000Z
|
2021-09-29T15:17:46.000Z
|
tests/test_enum_parameters.py
|
Algomorph/ext_argparse
|
fbca26f8a551f84677475a11fb5415ddda78abd9
|
[
"Apache-2.0"
] | null | null | null |
import os
import os.path
import pathlib
import pytest
from ext_argparse import process_arguments, process_settings_file
from tests.common import HouseParameters, HouseStyle, RoofMaterial, test_data_dir
def test_default_parameters():
    """Process an empty argv and confirm every house parameter keeps its default value."""
    process_arguments(HouseParameters, "Parameters of the house to repair.", argv=[])
    defaults = [
        (HouseParameters.sturdiness, 5.0),
        (HouseParameters.year_built, 2000),
        (HouseParameters.roof.year_changed, 2010),
        (HouseParameters.style, HouseStyle.CRAFTSMAN_BUNGALO),
        (HouseParameters.roof.roof_material, RoofMaterial.SLATE),
    ]
    # Same checks as individual asserts, evaluated in the same order.
    for parameter, expected in defaults:
        assert parameter.value == expected
def test_changed_enum_parameters():
    """Override every parameter (scalar, nested, and enum) via argv and verify the new values."""
    overrides = [
        "--sturdiness=6.0",
        "--year_built=2001",
        "--roof.year_changed=2012",
        "--style=CONTEMPORARY",
        "--roof.roof_material=SOLAR"
    ]
    process_arguments(HouseParameters, "Parameters of the house to repair.", argv=overrides)
    assert HouseParameters.sturdiness.value == 6.0
    assert HouseParameters.year_built.value == 2001
    assert HouseParameters.roof.year_changed.value == 2012
    assert HouseParameters.style.value == HouseStyle.CONTEMPORARY
    assert HouseParameters.roof.roof_material.value == RoofMaterial.SOLAR
def test_nested_parameter_save_load(test_data_dir):
    """Round-trip nested and enum parameters through a YAML settings file.

    Sequence: generate the default settings file, save non-default values to it,
    reset to defaults via argv, reload the file to recover the saved values,
    then save defaults back so the fixture file ends up unchanged.
    """
    output_settings_path = os.path.join(test_data_dir, "enum_settings.yaml")

    def check_state(sturdiness, year_built, year_changed, style, material):
        # Assert all five parameter values in the same order as the original checks.
        assert HouseParameters.sturdiness.value == sturdiness
        assert HouseParameters.year_built.value == year_built
        assert HouseParameters.roof.year_changed.value == year_changed
        assert HouseParameters.style.value == style
        assert HouseParameters.roof.roof_material.value == material

    # First run with only the settings file: defaults are in effect.
    process_arguments(HouseParameters, "Parameters of the house to repair.", argv=[
        f"--settings_file={output_settings_path}"])
    check_state(5.0, 2000, 2010, HouseStyle.CRAFTSMAN_BUNGALO, RoofMaterial.SLATE)

    # Save a full set of non-default values into the settings file.
    process_arguments(HouseParameters, "Parameters of the house to repair.", argv=[
        f"--settings_file={output_settings_path}",
        "--save_settings",
        "--sturdiness=6.0",
        "--year_built=2001",
        "--roof.year_changed=2012",
        "--style=CONTEMPORARY",
        "--roof.roof_material=SOLAR"
    ])
    check_state(6.0, 2001, 2012, HouseStyle.CONTEMPORARY, RoofMaterial.SOLAR)

    # load defaults
    process_arguments(HouseParameters, "Parameters of the house to repair.", argv=[
        "--sturdiness=5.0",
        "--year_built=2000",
        "--roof.year_changed=2010",
        "--style=CRAFTSMAN_BUNGALO",
        "--roof.roof_material=SLATE"
    ])
    check_state(5.0, 2000, 2010, HouseStyle.CRAFTSMAN_BUNGALO, RoofMaterial.SLATE)

    process_arguments(HouseParameters, "Parameters of the house to repair.", argv=[
        f"--settings_file={output_settings_path}"])
    # verify loaded settings were altered from default
    check_state(6.0, 2001, 2012, HouseStyle.CONTEMPORARY, RoofMaterial.SOLAR)

    # save defaults now
    process_arguments(HouseParameters, "Parameters of the house to repair.", argv=[
        f"--settings_file={output_settings_path}",
        "--save_settings",
        "--sturdiness=5.0",
        "--year_built=2000",
        "--roof.year_changed=2010",
        "--style=CRAFTSMAN_BUNGALO",
        "--roof.roof_material=SLATE"
    ])
    check_state(5.0, 2000, 2010, HouseStyle.CRAFTSMAN_BUNGALO, RoofMaterial.SLATE)
def test_process_settings_file(test_data_dir):
    """Load a pre-existing settings file directly (no argv) and verify the values it holds."""
    settings_path = os.path.join(test_data_dir, "enum_settings2.yaml")
    process_settings_file(HouseParameters, settings_path, generate_default_settings_if_missing=False)
    # Values expected to come from enum_settings2.yaml, checked in the original order.
    for parameter, expected in (
            (HouseParameters.sturdiness, 4.5),
            (HouseParameters.year_built, 1965),
            (HouseParameters.roof.year_changed, 1995),
            (HouseParameters.style, HouseStyle.QUEEN_ANNE),
            (HouseParameters.roof.roof_material, RoofMaterial.SLATE),
    ):
        assert parameter.value == expected
| 40.25641
| 101
| 0.736518
| 534
| 4,710
| 6.305243
| 0.140449
| 0.24948
| 0.1188
| 0.085536
| 0.850312
| 0.82774
| 0.815563
| 0.815563
| 0.79804
| 0.776062
| 0
| 0.030625
| 0.161147
| 4,710
| 116
| 102
| 40.603448
| 0.821564
| 0.016985
| 0
| 0.8
| 0
| 0
| 0.190013
| 0.0869
| 0
| 0
| 0
| 0
| 0.444444
| 1
| 0.044444
| false
| 0
| 0.066667
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
25a64833c7a14247d04275e847ba82a9113cf9a4
| 17,166
|
py
|
Python
|
development/v1/geocoding.py
|
jonathanengelbert/automated_geocoding
|
b7b0755729c6c6d7a393426c3ad11f3b4070d41b
|
[
"MIT"
] | 1
|
2020-01-21T21:43:04.000Z
|
2020-01-21T21:43:04.000Z
|
development/v1/geocoding.py
|
jonathanengelbert/automated_geocoder
|
b7b0755729c6c6d7a393426c3ad11f3b4070d41b
|
[
"MIT"
] | null | null | null |
development/v1/geocoding.py
|
jonathanengelbert/automated_geocoder
|
b7b0755729c6c6d7a393426c3ad11f3b4070d41b
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------------
# geocoding.py
# Created on: 2018-02-23 10:51:03.00000
# (generated by ArcGIS/ModelBuilder)
# Description: This script geocodes a table of addresses from an Excel
# spreadsheet.
# ---------------------------------------------------------------------------
# Import arcpy module
import arcpy
arcpy.env.overwriteOutput = True
def geocode():
    """Geocode a transformed Excel address table in two passes and merge.

    Pipeline (generated by ArcGIS ModelBuilder — hence the hard-coded
    ``I:\\GIS\\OASIS`` paths and the auto-numbered ``__2_``-style aliases):
      1. Import ``transformed.xlsx`` into the geocoder file-geodatabase.
      2. Geocode against the Master Address Database locator; split matched
         records (Status 'M'/'T') from unmatched ones (Status 'U').
      3. Export the failures and re-geocode them against the Street
         Centerlines locator.
      4. Tag each record with the locator that resolved it ("EAS" or "SC")
         and merge both successful layers into ``final``.

    NOTE(review): 'transfomed' (sic) is the table name used consistently
    below — keep the typo in sync if renaming.
    """
    # Local variables:
    AddressLocator_Street_Centerlines = "I:\\GIS\\OASIS\\AddressLocators\\AddressLocator_Street_Centerlines"
    transformed_xlsx = "I:\\GIS\\OASIS\\Geocoder\\transformed.xlsx"
    transfomed = "I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\transfomed"
    AddressLocator_Master_Address_Database = "I:\\GIS\\OASIS\\AddressLocators\\AddressLocator_Master_Address_Database"
    geocoded_addresses = "I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_addresses"
    geocoder_gdb = "I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb"
    geocoded_master_successful = "geocoded_master_successful"
    # The numbered aliases below mirror ModelBuilder's derived-output naming;
    # they all refer to the same layer/table at successive pipeline stages.
    geocoded_master_successful__2_ = geocoded_master_successful
    geocoded_master_successful__4_ = geocoded_master_successful__2_
    geocoder_gdb__2_ = "I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb"
    geocoded_addresses_failed = "geocoded_addresses_failed"
    geocoder_gdb__3_ = "I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb"
    failed_table = "I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\failed_table"
    failed_table__2_ = failed_table
    AddressLocator_Street_Centerlines__2_ = "I:\\GIS\\OASIS\\AddressLocators\\AddressLocator_Street_Centerlines"
    geocoded_street_centerlines = "I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_street_centerlines"
    geocoder_gdb__4_ = "I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb"
    geocoded_street_centerlines_successful = "geocoded_street_centerlines_successful"
    geocoded_street_centerlines_successful__2_ = geocoded_street_centerlines_successful
    geocoded_street_centerlines_successful__3_ = geocoded_street_centerlines_successful__2_
    final = "I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\final"
    # Process: Excel To Table
    arcpy.ExcelToTable_conversion(transformed_xlsx, transfomed, "")
    # Process: Geocode Addresses (first pass: Master Address Database)
    arcpy.GeocodeAddresses_geocoding(transfomed, AddressLocator_Master_Address_Database, "Key transformed_address VISIBLE NONE", geocoded_addresses, "STATIC", "", "ROUTING_LOCATION")
    # Process: Make Feature Layer (2) — keep matched ('M') and tied ('T') records
    arcpy.MakeFeatureLayer_management(geocoded_addresses, geocoded_master_successful, "Status = 'M' OR Status = 'T'", geocoder_gdb, "ObjectID OBJECTID VISIBLE NONE;Shape Shape VISIBLE NONE;Status Status VISIBLE NONE;Score Score VISIBLE NONE;Match_type Match_type VISIBLE NONE;Match_addr Match_addr VISIBLE NONE;X X VISIBLE NONE;Y Y VISIBLE NONE;Xmin Xmin VISIBLE NONE;Xmax Xmax VISIBLE NONE;Ymin Ymin VISIBLE NONE;Ymax Ymax VISIBLE NONE;Addr_type Addr_type VISIBLE NONE;ARC_SingleKey ARC_SingleKey VISIBLE NONE")
    # Process: Add Field
    arcpy.AddField_management(geocoded_master_successful, "geocoder", "TEXT", "", "", "20", "", "NULLABLE", "NON_REQUIRED", "")
    # Process: Calculate Field — tag first-pass matches as "EAS"
    arcpy.CalculateField_management(geocoded_master_successful__2_, "geocoder", "\"EAS\"", "PYTHON", "")
    # Process: Make Feature Layer — keep unmatched ('U') records for the second pass
    arcpy.MakeFeatureLayer_management(geocoded_addresses, geocoded_addresses_failed, "Status = 'U'", geocoder_gdb__2_, "ObjectID OBJECTID VISIBLE NONE;Shape Shape VISIBLE NONE;Status Status VISIBLE NONE;Score Score VISIBLE NONE;Match_type Match_type VISIBLE NONE;Match_addr Match_addr VISIBLE NONE;X X VISIBLE NONE;Y Y VISIBLE NONE;Xmin Xmin VISIBLE NONE;Xmax Xmax VISIBLE NONE;Ymin Ymin VISIBLE NONE;Ymax Ymax VISIBLE NONE;Addr_type Addr_type VISIBLE NONE;ARC_SingleKey ARC_SingleKey VISIBLE NONE")
    # Process: Table to Table
    arcpy.TableToTable_conversion(geocoded_addresses_failed, geocoder_gdb__3_, "failed_table", "", "Status \"Status\" true true false 1 Text 0 0 ,First,#,I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_addresses,Status,-1,-1;Score \"Score\" true true false 8 Double 0 0 ,First,#,I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_addresses,Score,-1,-1;Match_type \"Match_type\" true true false 2 Text 0 0 ,First,#,I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_addresses,Match_type,-1,-1;Match_addr \"Match_addr\" true true false 120 Text 0 0 ,First,#,I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_addresses,Match_addr,-1,-1;X \"X\" true true false 8 Double 0 0 ,First,#,I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_addresses,X,-1,-1;Y \"Y\" true true false 8 Double 0 0 ,First,#,I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_addresses,Y,-1,-1;Xmin \"Xmin\" true true false 8 Double 0 0 ,First,#,I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_addresses,Xmin,-1,-1;Xmax \"Xmax\" true true false 8 Double 0 0 ,First,#,I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_addresses,Xmax,-1,-1;Ymin \"Ymin\" true true false 8 Double 0 0 ,First,#,I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_addresses,Ymin,-1,-1;Ymax \"Ymax\" true true false 8 Double 0 0 ,First,#,I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_addresses,Ymax,-1,-1;Addr_type \"Addr_type\" true true false 20 Text 0 0 ,First,#,I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_addresses,Addr_type,-1,-1;ARC_Single_Line_Input \"Key\" true true false 100 Text 0 0 ,First,#,I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_addresses,ARC_Single_Line_Input,-1,-1;ST_NAME \"ST_NAME\" true true false 255 Text 0 0 ,First,#,I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_addresses,ST_NAME,-1,-1;EXT \"EXT\" true true false 255 Text 0 0 ,First,#,I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_addresses,EXT,-1,-1;NUM \"NUM\" true true false 4 Long 0 0 
,First,#,I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_addresses,NUM,-1,-1;LETTER \"LETTER\" true true false 255 Text 0 0 ,First,#,I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_addresses,LETTER,-1,-1;UNIT \"UNIT\" true true false 255 Text 0 0 ,First,#,I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_addresses,UNIT,-1,-1;Address \"Address\" true true false 255 Text 0 0 ,First,#,I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_addresses,Address,-1,-1;DBA \"DBA\" true true false 255 Text 0 0 ,First,#,I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_addresses,DBA,-1,-1;OWNER \"OWNER\" true true false 255 Text 0 0 ,First,#,I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_addresses,OWNER,-1,-1;ZIP \"ZIP\" true true false 4 Long 0 0 ,First,#,I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_addresses,ZIP,-1,-1;LONGITUDE \"LONGITUDE\" true true false 8 Double 0 0 ,First,#,I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_addresses,LONGITUDE,-1,-1;LATITUDE \"LATITUDE\" true true false 8 Double 0 0 ,First,#,I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_addresses,LATITUDE,-1,-1;DISTRICT \"DISTRICT\" true true false 4 Long 0 0 ,First,#,I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_addresses,DISTRICT,-1,-1;LOC_ID_NO \"LOC_ID_NO\" true true false 4 Long 0 0 ,First,#,I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_addresses,LOC_ID_NO,-1,-1;CERT_NO \"CERT_NO\" true true false 4 Long 0 0 ,First,#,I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_addresses,CERT_NO,-1,-1;H31_ACCOUNT_NO \"H31_ACCOUNT_NO\" true true false 255 Text 0 0 ,First,#,I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_addresses,H31_ACCOUNT_NO,-1,-1;PERMIT_NO \"PERMIT NO\" true true false 255 Text 0 0 ,First,#,I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_addresses,PERMIT_NO,-1,-1;BOE_NO \"BOE_NO\" true true false 4 Long 0 0 ,First,#,I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_addresses,BOE_NO,-1,-1;DATE_ISSUED \"DATE_ISSUED\" true true false 8 Date 0 0 
,First,#,I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_addresses,DATE_ISSUED,-1,-1;LAST_ROUTINE \"LAST_ROUTINE\" true true false 8 Date 0 0 ,First,#,I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_addresses,LAST_ROUTINE,-1,-1;transformed_address \"transformed_address\" true true false 255 Text 0 0 ,First,#,I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_addresses,transformed_address,-1,-1;geocoder \"geocoder\" true true false 20 Text 0 0 ,First,#,I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_addresses,geocoder,-1,-1;ARC_SingleKey \"Key\" true true false 100 Text 0 0 ,First,#,I:\\GIS\\OASIS\\Geocoder\\geocoder.gdb\\geocoded_addresses,ARC_SingleKey,-1,-1", "")
    # Process: Delete Field
    arcpy.DeleteField_management(failed_table, "ARC_Single_Line_Input")
    # Process: Geocode Addresses (2) — second pass: Street Centerlines locator
    arcpy.GeocodeAddresses_geocoding(failed_table__2_, AddressLocator_Street_Centerlines__2_, "'Full Address' transformed_address VISIBLE NONE", geocoded_street_centerlines, "STATIC", "", "")
    # Process: Make Feature Layer (3)
    arcpy.MakeFeatureLayer_management(geocoded_street_centerlines, geocoded_street_centerlines_successful, "", geocoder_gdb__4_, "ObjectID OBJECTID VISIBLE NONE;Shape Shape VISIBLE NONE;Status Status VISIBLE NONE;Score Score VISIBLE NONE;Match_type Match_type VISIBLE NONE;Match_addr Match_addr VISIBLE NONE;Side Side VISIBLE NONE;Ref_ID Ref_ID VISIBLE NONE;User_fld User_fld VISIBLE NONE;Addr_type Addr_type VISIBLE NONE;ARC_Single_Line_Input ARC_Single_Line_Input VISIBLE NONE;Status_1 Status_1 VISIBLE NONE;Score_1 Score_1 VISIBLE NONE;Match_type_1 Match_type_1 VISIBLE NONE;Match_addr_1 Match_addr_1 VISIBLE NONE;X X VISIBLE NONE;Y Y VISIBLE NONE;Xmin Xmin VISIBLE NONE;Xmax Xmax VISIBLE NONE;Ymin Ymin VISIBLE NONE;Ymax Ymax VISIBLE NONE;Addr_type_1 Addr_type_1 VISIBLE NONE;ST_NAME ST_NAME VISIBLE NONE;EXT EXT VISIBLE NONE;NUM NUM VISIBLE NONE;LETTER LETTER VISIBLE NONE;UNIT UNIT VISIBLE NONE;Address Address VISIBLE NONE;DBA DBA VISIBLE NONE;OWNER OWNER VISIBLE NONE;ZIP ZIP VISIBLE NONE;LONGITUDE LONGITUDE VISIBLE NONE;LATITUDE LATITUDE VISIBLE NONE;DISTRICT DISTRICT VISIBLE NONE;LOC_ID_NO LOC_ID_NO VISIBLE NONE;CERT_NO CERT_NO VISIBLE NONE;H31_ACCOUNT_NO H31_ACCOUNT_NO VISIBLE NONE;PERMIT_NO PERMIT_NO VISIBLE NONE;BOE_NO BOE_NO VISIBLE NONE;DATE_ISSUED DATE_ISSUED VISIBLE NONE;LAST_ROUTINE LAST_ROUTINE VISIBLE NONE;transformed_address transformed_address VISIBLE NONE;geocoder geocoder VISIBLE NONE;ARC_SingleKey ARC_SingleKey VISIBLE NONE")
    # Process: Add Field (2)
    arcpy.AddField_management(geocoded_street_centerlines_successful, "geocoder", "TEXT", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
    # Process: Calculate Field (2) — tag second-pass matches "SC", others "U"
    arcpy.CalculateField_management(geocoded_street_centerlines_successful__2_, "geocoder", "classifyGeocoder(!Status!)", "PYTHON", "def classifyGeocoder(Status):\\n if Status == \"M\" or Status == \"T\":\\n return \"SC\"\\n else:\\n return \"U\"")
    # Process: Merge
    arcpy.Merge_management("geocoded_master_successful;geocoded_street_centerlines_successful", final, "Status \"Status\" true true false 1 Text 0 0 ,First,#,geocoded_master_successful,Status,-1,-1,geocoded_street_centerlines_successful,Status,-1,-1;Score \"Score\" true true false 8 Double 0 0 ,First,#,geocoded_master_successful,Score,-1,-1,geocoded_street_centerlines_successful,Score,-1,-1;Match_type \"Match_type\" true true false 2 Text 0 0 ,First,#,geocoded_master_successful,Match_type,-1,-1,geocoded_street_centerlines_successful,Match_type,-1,-1;Match_addr \"Match_addr\" true true false 120 Text 0 0 ,First,#,geocoded_master_successful,Match_addr,-1,-1,geocoded_street_centerlines_successful,Match_addr,-1,-1;X \"X\" true true false 8 Double 0 0 ,First,#,geocoded_master_successful,X,-1,-1,geocoded_street_centerlines_successful,X,-1,-1;Y \"Y\" true true false 8 Double 0 0 ,First,#,geocoded_master_successful,Y,-1,-1,geocoded_street_centerlines_successful,Y,-1,-1;Xmin \"Xmin\" true true false 8 Double 0 0 ,First,#,geocoded_master_successful,Xmin,-1,-1,geocoded_street_centerlines_successful,Xmin,-1,-1;Xmax \"Xmax\" true true false 8 Double 0 0 ,First,#,geocoded_master_successful,Xmax,-1,-1,geocoded_street_centerlines_successful,Xmax,-1,-1;Ymin \"Ymin\" true true false 8 Double 0 0 ,First,#,geocoded_master_successful,Ymin,-1,-1,geocoded_street_centerlines_successful,Ymin,-1,-1;Ymax \"Ymax\" true true false 8 Double 0 0 ,First,#,geocoded_master_successful,Ymax,-1,-1,geocoded_street_centerlines_successful,Ymax,-1,-1;Addr_type \"Addr_type\" true true false 20 Text 0 0 ,First,#,geocoded_master_successful,Addr_type,-1,-1,geocoded_street_centerlines_successful,Addr_type,-1,-1;ARC_Single_Line_Input \"Key\" true true false 100 Text 0 0 ,First,#,geocoded_master_successful,ARC_Single_Line_Input,-1,-1,geocoded_street_centerlines_successful,ARC_Single_Line_Input,-1,-1;ST_NAME \"ST_NAME\" true true false 255 Text 0 0 
,First,#,geocoded_master_successful,ST_NAME,-1,-1,geocoded_street_centerlines_successful,ST_NAME,-1,-1;EXT \"EXT\" true true false 255 Text 0 0 ,First,#,geocoded_master_successful,EXT,-1,-1,geocoded_street_centerlines_successful,EXT,-1,-1;NUM \"NUM\" true true false 4 Long 0 0 ,First,#,geocoded_master_successful,NUM,-1,-1,geocoded_street_centerlines_successful,NUM,-1,-1;LETTER \"LETTER\" true true false 255 Text 0 0 ,First,#,geocoded_master_successful,LETTER,-1,-1,geocoded_street_centerlines_successful,LETTER,-1,-1;UNIT \"UNIT\" true true false 255 Text 0 0 ,First,#,geocoded_master_successful,UNIT,-1,-1,geocoded_street_centerlines_successful,UNIT,-1,-1;Address \"Address\" true true false 255 Text 0 0 ,First,#,geocoded_master_successful,Address,-1,-1,geocoded_street_centerlines_successful,Address,-1,-1;DBA \"DBA\" true true false 255 Text 0 0 ,First,#,geocoded_master_successful,DBA,-1,-1,geocoded_street_centerlines_successful,DBA,-1,-1;OWNER \"OWNER\" true true false 255 Text 0 0 ,First,#,geocoded_master_successful,OWNER,-1,-1,geocoded_street_centerlines_successful,OWNER,-1,-1;ZIP \"ZIP\" true true false 4 Long 0 0 ,First,#,geocoded_master_successful,ZIP,-1,-1,geocoded_street_centerlines_successful,ZIP,-1,-1;LONGITUDE \"LONGITUDE\" true true false 8 Double 0 0 ,First,#,geocoded_master_successful,LONGITUDE,-1,-1,geocoded_street_centerlines_successful,LONGITUDE,-1,-1;LATITUDE \"LATITUDE\" true true false 8 Double 0 0 ,First,#,geocoded_master_successful,LATITUDE,-1,-1,geocoded_street_centerlines_successful,LATITUDE,-1,-1;DISTRICT \"DISTRICT\" true true false 4 Long 0 0 ,First,#,geocoded_master_successful,DISTRICT,-1,-1,geocoded_street_centerlines_successful,DISTRICT,-1,-1;LOC_ID_NO \"LOC_ID_NO\" true true false 4 Long 0 0 ,First,#,geocoded_master_successful,LOC_ID_NO,-1,-1,geocoded_street_centerlines_successful,LOC_ID_NO,-1,-1;CERT_NO \"CERT_NO\" true true false 4 Long 0 0 
,First,#,geocoded_master_successful,CERT_NO,-1,-1,geocoded_street_centerlines_successful,CERT_NO,-1,-1;H31_ACCOUNT_NO \"H31_ACCOUNT_NO\" true true false 255 Text 0 0 ,First,#,geocoded_master_successful,H31_ACCOUNT_NO,-1,-1,geocoded_street_centerlines_successful,H31_ACCOUNT_NO,-1,-1;PERMIT_NO \"PERMIT NO\" true true false 255 Text 0 0 ,First,#,geocoded_master_successful,PERMIT_NO,-1,-1,geocoded_street_centerlines_successful,PERMIT_NO,-1,-1;BOE_NO \"BOE_NO\" true true false 4 Long 0 0 ,First,#,geocoded_master_successful,BOE_NO,-1,-1,geocoded_street_centerlines_successful,BOE_NO,-1,-1;DATE_ISSUED \"DATE_ISSUED\" true true false 8 Date 0 0 ,First,#,geocoded_master_successful,DATE_ISSUED,-1,-1,geocoded_street_centerlines_successful,DATE_ISSUED,-1,-1;LAST_ROUTINE \"LAST_ROUTINE\" true true false 8 Date 0 0 ,First,#,geocoded_master_successful,LAST_ROUTINE,-1,-1,geocoded_street_centerlines_successful,LAST_ROUTINE,-1,-1;transformed_address \"transformed_address\" true true false 255 Text 0 0 ,First,#,geocoded_master_successful,transformed_address,-1,-1,geocoded_street_centerlines_successful,transformed_address,-1,-1;geocoder \"geocoder\" true true false 20 Text 0 0 ,First,#,geocoded_master_successful,geocoder,-1,-1,geocoded_street_centerlines_successful,geocoder,-1,-1;ARC_SingleKey \"Key\" true true false 100 Text 0 0 ,First,#,geocoded_master_successful,ARC_SingleKey,-1,-1,geocoded_street_centerlines_successful,ARC_SingleKey,-1,-1;Side \"Side\" true true false 1 Text 0 0 ,First,#,geocoded_street_centerlines_successful,Side,-1,-1;Ref_ID \"Ref_ID\" true true false 50 Text 0 0 ,First,#,geocoded_street_centerlines_successful,Ref_ID,-1,-1;User_fld \"User_fld\" true true false 120 Text 0 0 ,First,#,geocoded_street_centerlines_successful,User_fld,-1,-1;Status_1 \"Status\" true true false 1 Text 0 0 ,First,#,geocoded_street_centerlines_successful,Status_1,-1,-1;Score_1 \"Score\" true true false 8 Double 0 0 ,First,#,geocoded_street_centerlines_successful,Score_1,-1,-1;Match_type_1 
\"Match_type_1\" true true false 2 Text 0 0 ,First,#,geocoded_street_centerlines_successful,Match_type_1,-1,-1;Match_addr_1 \"Match_addr\" true true false 120 Text 0 0 ,First,#,geocoded_street_centerlines_successful,Match_addr_1,-1,-1;Addr_type_1 \"Addr_type\" true true false 20 Text 0 0 ,First,#,geocoded_street_centerlines_successful,Addr_type_1,-1,-1")
    # Release the in-memory layer references created above.
    del geocoded_addresses_failed
    del geocoded_street_centerlines_successful
    del geocoded_master_successful
if __name__ == "__main__":
    # Script entry point: run the full geocoding chain end to end.
    geocode()
    print("\nAddresses Processed")
    # NOTE(review): exit() is the interactive-site helper; sys.exit() is the
    # conventional choice for scripts — confirm before changing.
    exit()
| 192.876404
| 6,192
| 0.77001
| 2,611
| 17,166
| 4.817694
| 0.06396
| 0.018284
| 0.078544
| 0.147468
| 0.823515
| 0.763733
| 0.66301
| 0.610303
| 0.569362
| 0.547261
| 0
| 0.037989
| 0.08913
| 17,166
| 88
| 6,193
| 195.068182
| 0.7665
| 0.041419
| 0
| 0
| 1
| 1.613636
| 0.80399
| 0.512331
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022727
| false
| 0
| 0.022727
| 0
| 0.045455
| 0.022727
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d368d5d24bf2225d28f402a35a87ce37e8001ce0
| 114
|
py
|
Python
|
tadataka/vector.py
|
IshitaTakeshi/Tadataka
|
852c7afb904503005e51884408e1492ef0be836f
|
[
"Apache-2.0"
] | 54
|
2019-11-15T16:30:34.000Z
|
2022-01-13T15:18:54.000Z
|
tadataka/vector.py
|
IshitaTakeshi/Tadataka
|
852c7afb904503005e51884408e1492ef0be836f
|
[
"Apache-2.0"
] | 11
|
2019-02-28T08:28:24.000Z
|
2020-04-07T04:47:12.000Z
|
tadataka/vector.py
|
IshitaTakeshi/Tadataka
|
852c7afb904503005e51884408e1492ef0be836f
|
[
"Apache-2.0"
] | 1
|
2020-02-26T13:59:40.000Z
|
2020-02-26T13:59:40.000Z
|
import numpy as np
def normalize_length(v):
    """Return *v* scaled to unit Euclidean length.

    Parameters
    ----------
    v : array_like
        Vector to normalize.

    Returns
    -------
    numpy.ndarray
        ``v / ||v||_2``.

    Raises
    ------
    ValueError
        If *v* has zero norm.  This resolves the old ``TODO``: previously
        a zero vector silently divided by zero and produced nan/inf.
    """
    norm = np.linalg.norm(v)
    if norm == 0:
        raise ValueError("cannot normalize a zero-length vector")
    return v / norm
| 16.285714
| 34
| 0.684211
| 19
| 114
| 4.052632
| 0.842105
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.22807
| 114
| 6
| 35
| 19
| 0.875
| 0.245614
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
d36c8f503fc326e44a6f36f1303ac1a6502281b3
| 161
|
py
|
Python
|
blog/views.py
|
kimshangyup/start
|
b88b0e893c76969c00030d2bf28a623a905a84b8
|
[
"MIT"
] | null | null | null |
blog/views.py
|
kimshangyup/start
|
b88b0e893c76969c00030d2bf28a623a905a84b8
|
[
"MIT"
] | null | null | null |
blog/views.py
|
kimshangyup/start
|
b88b0e893c76969c00030d2bf28a623a905a84b8
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render, render_to_response
# Create your views here.
def index(request):
    """Render the blog index page.

    ``render_to_response`` was deprecated in Django 2.0 and removed in 3.0;
    ``render`` returns the same ``HttpResponse`` while also passing the
    request through to the template engine (enabling context processors).
    """
    # locals() at this point is just {'request': request}; kept for parity
    # with the original call's template context.
    return render(request, 'blog/index.html', locals())
| 26.833333
| 57
| 0.782609
| 23
| 161
| 5.304348
| 0.782609
| 0.131148
| 0.262295
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118012
| 161
| 5
| 58
| 32.2
| 0.859155
| 0.142857
| 0
| 0
| 0
| 0
| 0.110294
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
6c92bff579d579403005659c3735980cc2c97243
| 30,504
|
py
|
Python
|
models/flow/blocks.py
|
CompVis/behavior-driven-video-synthesis
|
ec090009dd56926ecbccfc73b77f979a50e34af1
|
[
"Apache-2.0"
] | 14
|
2021-03-04T08:31:08.000Z
|
2022-03-30T02:44:18.000Z
|
models/flow/blocks.py
|
CompVis/behavior-driven-video-synthesis
|
ec090009dd56926ecbccfc73b77f979a50e34af1
|
[
"Apache-2.0"
] | null | null | null |
models/flow/blocks.py
|
CompVis/behavior-driven-video-synthesis
|
ec090009dd56926ecbccfc73b77f979a50e34af1
|
[
"Apache-2.0"
] | 3
|
2021-03-08T15:35:52.000Z
|
2022-03-06T22:21:04.000Z
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from lib.modules import BasicFullyConnectedNet, ActNorm, GINActNorm, GIN2ActNorm
class ConditionalFlow(nn.Module):
    """Flat normalizing flow that feeds a conditioning embedding into every block."""

    def __init__(self, in_channels, embedding_dim, hidden_dim, hidden_depth,
                 n_flows, conditioning_option="none", activation='lrelu'):
        super().__init__()
        self.in_channels = in_channels
        self.cond_channels = embedding_dim
        self.mid_channels = hidden_dim
        self.num_blocks = hidden_depth
        self.n_flows = n_flows
        self.conditioning_option = conditioning_option
        self.sub_layers = nn.ModuleList()
        conditioned = self.conditioning_option.lower() != "none"
        if conditioned:
            # One 1x1 conv per flow step to transform the conditioning signal.
            self.conditioning_layers = nn.ModuleList()
        for _ in range(self.n_flows):
            self.sub_layers.append(
                ConditionalFlatDoubleCouplingFlowBlock(
                    self.in_channels, self.cond_channels, self.mid_channels,
                    self.num_blocks, activation=activation))
            if conditioned:
                self.conditioning_layers.append(
                    nn.Conv2d(self.cond_channels, self.cond_channels, 1))

    def forward(self, x, embedding, reverse=False):
        mode = self.conditioning_option.lower()
        hcond = embedding[:, :, None, None]
        hconds = []
        self.last_outs = []
        self.last_logdets = []
        # Precompute the per-step conditioning tensors.
        for i in range(self.n_flows):
            if mode == "parallel":
                hcond = self.conditioning_layers[i](embedding)
            elif mode == "sequential":
                hcond = self.conditioning_layers[i](hcond)
            hconds.append(hcond)
        if reverse:
            for i in reversed(range(self.n_flows)):
                x = self.sub_layers[i](x, hconds[i], reverse=True)
            return x
        logdet = 0.0
        for i, layer in enumerate(self.sub_layers):
            x, delta = layer(x, hconds[i])
            logdet = logdet + delta
            self.last_outs.append(x)
            self.last_logdets.append(logdet)
        return x, logdet

    def reverse(self, out, xcond):
        """Inverse transform; shorthand for forward(out, xcond, reverse=True)."""
        return self(out, xcond, reverse=True)
class UnconditionalFlow(nn.Module):
    """Flat normalizing flow: a stack of unconditional double-coupling blocks."""

    def __init__(self, in_channels, hidden_dim, hidden_depth, n_flows, activation='lrelu'):
        super().__init__()
        self.in_channels = in_channels
        self.mid_channels = hidden_dim
        self.num_blocks = hidden_depth
        self.n_flows = n_flows
        self.sub_layers = nn.ModuleList([
            UnconditionalFlatDoubleCouplingFlowBlock(
                self.in_channels, self.mid_channels, self.num_blocks,
                activation=activation)
            for _ in range(self.n_flows)
        ])

    def forward(self, x, reverse=False):
        # Keep per-step outputs / cumulative logdets for later inspection.
        self.last_outs = []
        self.last_logdets = []
        if reverse:
            for layer in reversed(self.sub_layers):
                x = layer(x, reverse=True)
            return x
        logdet = 0.0
        for layer in self.sub_layers:
            x, delta = layer(x)
            logdet = logdet + delta
            self.last_outs.append(x)
            self.last_logdets.append(logdet)
        return x, logdet

    def reverse(self, out):
        """Inverse transform; shorthand for forward(out, reverse=True)."""
        return self(out, reverse=True)
class UnconditionalFlow2(nn.Module):
    """Flat normalizing flow built from the uneven-width coupling blocks."""

    def __init__(self, in_channels, hidden_dim, hidden_depth, n_flows):
        super().__init__()
        self.in_channels = in_channels
        self.mid_channels = hidden_dim
        self.num_blocks = hidden_depth
        self.n_flows = n_flows
        self.sub_layers = nn.ModuleList([
            UnconditionalFlatDoubleCouplingFlowBlock2(
                self.in_channels, self.mid_channels, self.num_blocks)
            for _ in range(self.n_flows)
        ])

    def forward(self, x, reverse=False):
        # Keep per-step outputs / cumulative logdets for later inspection.
        self.last_outs = []
        self.last_logdets = []
        if reverse:
            for layer in reversed(self.sub_layers):
                x = layer(x, reverse=True)
            return x
        logdet = 0.0
        for layer in self.sub_layers:
            x, delta = layer(x)
            logdet = logdet + delta
            self.last_outs.append(x)
            self.last_logdets.append(logdet)
        return x, logdet

    def reverse(self, out):
        """Inverse transform; shorthand for forward(out, reverse=True)."""
        return self(out, reverse=True)
class UnconditionalGINFlow(nn.Module):
    """Flat normalizing flow built from volume-preserving GIN coupling blocks."""

    def __init__(self, in_channels, hidden_dim, hidden_depth, n_flows):
        super().__init__()
        self.in_channels = in_channels
        self.mid_channels = hidden_dim
        self.num_blocks = hidden_depth
        self.n_flows = n_flows
        self.sub_layers = nn.ModuleList([
            UnconditionalFlatDoubleCouplingGINFlowBlock(
                self.in_channels, self.mid_channels, self.num_blocks)
            for _ in range(self.n_flows)
        ])

    def forward(self, x, reverse=False):
        # Keep per-step outputs / cumulative logdets for later inspection.
        self.last_outs = []
        self.last_logdets = []
        if reverse:
            for layer in reversed(self.sub_layers):
                x = layer(x, reverse=True)
            return x
        logdet = 0.0
        for layer in self.sub_layers:
            x, delta = layer(x)
            logdet = logdet + delta
            self.last_outs.append(x)
            self.last_logdets.append(logdet)
        return x, logdet

    def reverse(self, out):
        """Inverse transform; shorthand for forward(out, reverse=True)."""
        return self(out, reverse=True)
class UnconditionalGIN2Flow(nn.Module):
    """Flat normalizing flow built from the GIN2 (uniform-scaling) coupling blocks."""

    def __init__(self, in_channels, hidden_dim, hidden_depth, n_flows):
        super().__init__()
        self.in_channels = in_channels
        self.mid_channels = hidden_dim
        self.num_blocks = hidden_depth
        self.n_flows = n_flows
        self.sub_layers = nn.ModuleList([
            UnconditionalFlatDoubleCouplingGIN2FlowBlock(
                self.in_channels, self.mid_channels, self.num_blocks)
            for _ in range(self.n_flows)
        ])

    def forward(self, x, reverse=False):
        # Keep per-step outputs / cumulative logdets for later inspection.
        self.last_outs = []
        self.last_logdets = []
        if reverse:
            for layer in reversed(self.sub_layers):
                x = layer(x, reverse=True)
            return x
        logdet = 0.0
        for layer in self.sub_layers:
            x, delta = layer(x)
            logdet = logdet + delta
            self.last_outs.append(x)
            self.last_logdets.append(logdet)
        return x, logdet

    def reverse(self, out):
        """Inverse transform; shorthand for forward(out, reverse=True)."""
        return self(out, reverse=True)
class UnconditionalNICEFlow(nn.Module):
    """Flat normalizing flow built from additive (NICE-style) coupling blocks."""

    def __init__(self, in_channels, hidden_dim, hidden_depth, n_flows):
        super().__init__()
        self.in_channels = in_channels
        self.mid_channels = hidden_dim
        self.num_blocks = hidden_depth
        self.n_flows = n_flows
        self.sub_layers = nn.ModuleList([
            UnconditionalFlatDoubleCouplingNICEFlowBlock(
                self.in_channels, self.mid_channels, self.num_blocks)
            for _ in range(self.n_flows)
        ])

    def forward(self, x, reverse=False):
        # Keep per-step outputs / cumulative logdets for later inspection.
        self.last_outs = []
        self.last_logdets = []
        if reverse:
            for layer in reversed(self.sub_layers):
                x = layer(x, reverse=True)
            return x
        logdet = 0.0
        for layer in self.sub_layers:
            x, delta = layer(x)
            logdet = logdet + delta
            self.last_outs.append(x)
            self.last_logdets.append(logdet)
        return x, logdet

    def reverse(self, out):
        """Inverse transform; shorthand for forward(out, reverse=True)."""
        return self(out, reverse=True)
class DoubleVectorCouplingBlock(nn.Module):
    """Affine coupling applied twice, flipping the halves between passes so
    both halves of the vector get transformed (unlike VectorCouplingBlock)."""

    def __init__(self, in_channels, hidden_dim, hidden_depth=2):
        super(DoubleVectorCouplingBlock, self).__init__()
        assert in_channels % 2 == 0
        half = in_channels // 2
        self.s = nn.ModuleList([
            BasicFullyConnectedNet(dim=half, depth=hidden_depth,
                                   hidden_dim=hidden_dim, use_tanh=True)
            for _ in range(2)])
        self.t = nn.ModuleList([
            BasicFullyConnectedNet(dim=half, depth=hidden_depth,
                                   hidden_dim=hidden_dim, use_tanh=False)
            for _ in range(2)])

    def forward(self, x, reverse=False):
        assert len(x.shape) == 4
        x = x.squeeze(-1).squeeze(-1)  # (B, C, 1, 1) -> (B, C)
        if not reverse:
            logdet = 0
            for i in range(len(self.s)):
                apply_idx, keep_idx = 0, 1
                if i % 2 != 0:
                    # Alternate which half is transformed.
                    halves = torch.chunk(x, 2, dim=1)
                    x = torch.cat((halves[1], halves[0]), dim=1)
                x = torch.chunk(x, 2, dim=1)
                scale = self.s[i](x[apply_idx])
                mixed = x[keep_idx] * scale.exp() + self.t[i](x[apply_idx])
                x = torch.cat((x[apply_idx], mixed), dim=1)
                logdet = logdet + torch.sum(scale.view(x.size(0), -1), dim=1)
            return x[:, :, None, None], logdet
        apply_idx, keep_idx = 0, 1
        for i in reversed(range(len(self.s))):
            if i % 2 == 0:
                halves = torch.chunk(x, 2, dim=1)
                x = torch.cat((halves[1], halves[0]), dim=1)
            x = torch.chunk(x, 2, dim=1)
            restored = (x[keep_idx] - self.t[i](x[apply_idx])) * self.s[i](x[apply_idx]).neg().exp()
            x = torch.cat((x[apply_idx], restored), dim=1)
        return x[:, :, None, None]
class DoubleVectorCouplingBlock2(nn.Module):
    """Double affine coupling that also supports odd channel counts: the
    networks map the (possibly larger) first half to the second half's width."""

    def __init__(self, in_channels, hidden_dim, hidden_depth=2):
        super().__init__()
        dim1 = (in_channels // 2) + (in_channels % 2)  # larger half on odd widths
        dim2 = in_channels // 2

        def _pair(use_tanh):
            # Two networks (one per coupling pass) with identical shapes.
            return nn.ModuleList([
                BasicFullyConnectedNet(dim=dim1, out_dim=dim2, depth=hidden_depth,
                                       hidden_dim=hidden_dim, use_tanh=use_tanh)
                for _ in range(2)])

        self.s = _pair(True)
        self.t = _pair(False)

    def forward(self, x, reverse=False):
        assert len(x.shape) == 4
        x = x.squeeze(-1).squeeze(-1)  # (B, C, 1, 1) -> (B, C)
        if not reverse:
            logdet = 0
            for i in range(len(self.s)):
                apply_idx, keep_idx = 0, 1
                if i % 2 != 0:
                    # Alternate which half is transformed.
                    halves = torch.chunk(x, 2, dim=1)
                    x = torch.cat((halves[1], halves[0]), dim=1)
                x = torch.chunk(x, 2, dim=1)
                scale = self.s[i](x[apply_idx])
                mixed = x[keep_idx] * scale.exp() + self.t[i](x[apply_idx])
                x = torch.cat((x[apply_idx], mixed), dim=1)
                logdet = logdet + torch.sum(scale.view(x.size(0), -1), dim=1)
            return x[:, :, None, None], logdet
        apply_idx, keep_idx = 0, 1
        for i in reversed(range(len(self.s))):
            if i % 2 == 0:
                halves = torch.chunk(x, 2, dim=1)
                x = torch.cat((halves[1], halves[0]), dim=1)
            x = torch.chunk(x, 2, dim=1)
            restored = (x[keep_idx] - self.t[i](x[apply_idx])) * self.s[i](x[apply_idx]).neg().exp()
            x = torch.cat((x[apply_idx], restored), dim=1)
        return x[:, :, None, None]
class GINDoubleVectorCouplingBlock(nn.Module):
    """Volume preserving version. In contrast to VectorCouplingBlock, this
    module assures alternating chunking in upper and lower half.

    GIN constraint: per-sample scales sum to zero (get_scale appends the
    balancing entry), so the Jacobian log-determinant is identically zero.
    """

    def __init__(self, in_channels, hidden_dim, hidden_depth=2):
        super().__init__()
        assert in_channels % 2 == 0
        # s predicts one scale fewer than the half-width; get_scale() adds
        # the balancing last entry so each row sums to zero.
        self.s = nn.ModuleList([BasicFullyConnectedNet(
            dim=in_channels // 2, depth=hidden_depth, hidden_dim=hidden_dim,
            out_dim=in_channels // 2 - 1,
            use_tanh=True) for _ in range(2)])
        self.t = nn.ModuleList([BasicFullyConnectedNet(
            dim=in_channels // 2, depth=hidden_depth, hidden_dim=hidden_dim,
            use_tanh=False) for _ in range(2)])

    def get_scale(self, scale):
        """Append a final scale entry that makes each row sum to zero."""
        assert len(scale.shape) == 2
        if scale.shape[1] == 0:
            # in_channels == 2: no free scales, the single scale must be 0.
            # BUG FIX: allocate on the input's device/dtype instead of the
            # CPU float32 default, which broke the subsequent cat/mul on GPU.
            return torch.zeros(scale.shape[0], 1, dtype=scale.dtype, device=scale.device)
        lastscale = -torch.sum(scale, dim=1, keepdim=True)
        scale = torch.cat((scale, lastscale), dim=1)
        return scale

    def forward(self, x, reverse=False):
        assert len(x.shape) == 4
        x = x.squeeze(-1).squeeze(-1)  # (B, C, 1, 1) -> (B, C)
        if not reverse:
            for i in range(len(self.s)):
                idx_apply, idx_keep = 0, 1
                if i % 2 != 0:
                    # Alternate which half is transformed.
                    x = torch.cat(torch.chunk(x, 2, dim=1)[::-1], dim=1)
                x = torch.chunk(x, 2, dim=1)
                scale = self.get_scale(self.s[i](x[idx_apply]))
                x_ = x[idx_keep] * (scale.exp()) + self.t[i](x[idx_apply])
                x = torch.cat((x[idx_apply], x_), dim=1)
            # Scales sum to zero per sample -> logdet is identically zero.
            logdet = torch.zeros(x.shape[0]).to(x)
            return x[:, :, None, None], logdet
        else:
            idx_apply, idx_keep = 0, 1
            for i in reversed(range(len(self.s))):
                if i % 2 == 0:
                    x = torch.cat(torch.chunk(x, 2, dim=1)[::-1], dim=1)
                x = torch.chunk(x, 2, dim=1)
                scale = self.get_scale(self.s[i](x[idx_apply]))
                x_ = (x[idx_keep] - self.t[i](x[idx_apply])) * (scale.neg().exp())
                x = torch.cat((x[idx_apply], x_), dim=1)
            return x[:, :, None, None]
class GIN2DoubleVectorCouplingBlock(nn.Module):
    """Volume-preserving double coupling with a uniform scale shift: each
    predicted scale row is shifted by its own sum (see get_scale), and the
    block reports a zero log-determinant.  Alternates which half of the
    vector is transformed, like the other double coupling blocks."""

    def __init__(self, in_channels, hidden_dim, hidden_depth=2):
        super().__init__()
        assert in_channels % 2 == 0
        half = in_channels // 2
        self.s = nn.ModuleList([
            BasicFullyConnectedNet(dim=half, depth=hidden_depth,
                                   hidden_dim=hidden_dim, out_dim=half,
                                   use_tanh=True)
            for _ in range(2)])
        self.t = nn.ModuleList([
            BasicFullyConnectedNet(dim=half, depth=hidden_depth,
                                   hidden_dim=hidden_dim, use_tanh=False)
            for _ in range(2)])

    def get_scale(self, scale):
        """Shift every row of *scale* by that row's sum."""
        assert len(scale.shape) == 2
        return scale - torch.sum(scale, dim=1, keepdim=True)

    def forward(self, x, reverse=False):
        assert len(x.shape) == 4
        x = x.squeeze(-1).squeeze(-1)  # (B, C, 1, 1) -> (B, C)
        if not reverse:
            for i in range(len(self.s)):
                apply_idx, keep_idx = 0, 1
                if i % 2 != 0:
                    halves = torch.chunk(x, 2, dim=1)
                    x = torch.cat((halves[1], halves[0]), dim=1)
                x = torch.chunk(x, 2, dim=1)
                scale = self.get_scale(self.s[i](x[apply_idx]))
                mixed = x[keep_idx] * scale.exp() + self.t[i](x[apply_idx])
                x = torch.cat((x[apply_idx], mixed), dim=1)
            # TODO better return real logdet?  (kept from the original)
            logdet = torch.zeros(x.shape[0]).to(x)
            return x[:, :, None, None], logdet
        apply_idx, keep_idx = 0, 1
        for i in reversed(range(len(self.s))):
            if i % 2 == 0:
                halves = torch.chunk(x, 2, dim=1)
                x = torch.cat((halves[1], halves[0]), dim=1)
            x = torch.chunk(x, 2, dim=1)
            scale = self.get_scale(self.s[i](x[apply_idx]))
            restored = (x[keep_idx] - self.t[i](x[apply_idx])) * scale.neg().exp()
            x = torch.cat((x[apply_idx], restored), dim=1)
        return x[:, :, None, None]
class NICEDoubleVectorCouplingBlock(nn.Module):
    """Additive (NICE-style) coupling: volume preserving because no scaling
    is applied. The two coupling steps chunk alternating channel halves."""

    def __init__(self, in_channels, hidden_dim, hidden_depth=2):
        super().__init__()
        assert in_channels % 2 == 0
        # One translation network per coupling step.
        self.t = nn.ModuleList([
            BasicFullyConnectedNet(dim=in_channels // 2, depth=hidden_depth,
                                   hidden_dim=hidden_dim, use_tanh=False)
            for _ in range(2)])

    def _swap_halves(self, h):
        # Concatenate the two channel halves in reversed order.
        upper, lower = torch.chunk(h, 2, dim=1)
        return torch.cat((lower, upper), dim=1)

    def forward(self, x, reverse=False):
        # Expects (B, C, 1, 1); computed on the flat (B, C) view.
        assert len(x.shape) == 4
        h = x.squeeze(-1).squeeze(-1)
        if not reverse:
            for i in range(len(self.t)):
                if i % 2 != 0:
                    h = self._swap_halves(h)
                cond, passive = torch.chunk(h, 2, dim=1)
                # Additive step: passive half shifted by t(conditioning half).
                h = torch.cat((cond, passive + self.t[i](cond)), dim=1)
            # Purely additive coupling has zero log-determinant.
            logdet = torch.zeros(h.shape[0]).to(h)
            return h[:, :, None, None], logdet
        for i in reversed(range(len(self.t))):
            if i % 2 == 0:
                h = self._swap_halves(h)
            cond, mixed = torch.chunk(h, 2, dim=1)
            # Invert the additive step by subtracting the same shift.
            h = torch.cat((cond, mixed - self.t[i](cond)), dim=1)
        return h[:, :, None, None]
class ConditionalDoubleVectorCouplingBlock(nn.Module):
    """Two affine coupling steps whose scale/translation nets also see a
    conditioning input xc; the chunked halves alternate between steps."""

    def __init__(self, in_channels, cond_channels, hidden_dim, depth=2):
        super(ConditionalDoubleVectorCouplingBlock, self).__init__()
        # Scale nets (tanh-bounded) and translation nets, each consuming
        # [transformed-half ; conditioning] and emitting in_channels // 2 values.
        self.s = nn.ModuleList([
            BasicFullyConnectedNet(dim=in_channels // 2 + cond_channels,
                                   depth=depth, hidden_dim=hidden_dim,
                                   use_tanh=True,
                                   out_dim=in_channels // 2) for _ in range(2)])
        self.t = nn.ModuleList([
            BasicFullyConnectedNet(dim=in_channels // 2 + cond_channels,
                                   depth=depth, hidden_dim=hidden_dim,
                                   use_tanh=False,
                                   out_dim=in_channels // 2) for _ in range(2)])

    def forward(self, x, xc, reverse=False):
        # Both inputs arrive as (B, C, 1, 1); work on the flat (B, C) views.
        assert len(x.shape) == 4
        assert len(xc.shape) == 4
        h = x.squeeze(-1).squeeze(-1)
        cond = xc.squeeze(-1).squeeze(-1)
        if not reverse:
            logdet = 0
            for i in range(len(self.s)):
                if i % 2 != 0:
                    a, b = torch.chunk(h, 2, dim=1)
                    h = torch.cat((b, a), dim=1)
                applied, kept = torch.chunk(h, 2, dim=1)
                net_in = torch.cat((applied, cond), dim=1)
                scale = self.s[i](net_in)
                # Affine step: scale (in log space) then shift the kept half.
                kept = kept * scale.exp() + self.t[i](net_in)
                h = torch.cat((applied, kept), dim=1)
                logdet = logdet + torch.sum(scale, dim=1)
            return h[:, :, None, None], logdet
        for i in reversed(range(len(self.s))):
            if i % 2 == 0:
                a, b = torch.chunk(h, 2, dim=1)
                h = torch.cat((b, a), dim=1)
            applied, kept = torch.chunk(h, 2, dim=1)
            net_in = torch.cat((applied, cond), dim=1)
            # Invert: shift back, then divide by the scale.
            kept = (kept - self.t[i](net_in)) * self.s[i](net_in).neg().exp()
            h = torch.cat((applied, kept), dim=1)
        return h[:, :, None, None]
class UnconditionalFlatDoubleCouplingFlowBlock(nn.Module):
    """One invertible flow step: ActNorm -> invertible activation ->
    double vector coupling -> fixed channel shuffle."""

    def __init__(self, in_channels, hidden_dim, hidden_depth, activation="lrelu"):
        super().__init__()
        activations = {"lrelu": lambda: InvLeakyRelu(alpha=0.95),
                       "none": IgnoreLeakyRelu}
        self.norm_layer = ActNorm(in_channels, logdet=True)
        self.coupling = DoubleVectorCouplingBlock(in_channels,
                                                  hidden_dim,
                                                  hidden_depth)
        self.activation = activations[activation]()
        self.shuffle = Shuffle(in_channels)

    def forward(self, x, reverse=False):
        if reverse:
            # Undo the sub-layers in the opposite order.
            h = self.shuffle(x, reverse=True)
            h = self.coupling(h, reverse=True)
            h = self.activation(h, reverse=True)
            return self.norm_layer(h, reverse=True)
        # Accumulate the log-determinant across the four sub-layers.
        h = x
        logdet = 0.0
        for layer in (self.norm_layer, self.activation, self.coupling, self.shuffle):
            h, ld = layer(h)
            logdet = logdet + ld
        return h, logdet

    def reverse(self, out):
        return self.forward(out, reverse=True)
class UnconditionalFlatDoubleCouplingFlowBlock2(nn.Module):
    """One invertible flow step: ActNorm -> DoubleVectorCouplingBlock2 ->
    fixed channel shuffle (no extra activation layer)."""

    def __init__(self, in_channels, hidden_dim, hidden_depth):
        super().__init__()
        self.norm_layer = ActNorm(in_channels, logdet=True)
        self.coupling = DoubleVectorCouplingBlock2(in_channels,
                                                    hidden_dim,
                                                    hidden_depth)
        self.shuffle = Shuffle(in_channels)

    def forward(self, x, reverse=False):
        if reverse:
            # Invert sub-layers in the opposite order.
            h = self.shuffle(x, reverse=True)
            h = self.coupling(h, reverse=True)
            return self.norm_layer(h, reverse=True)
        # Accumulate the log-determinant across the three sub-layers.
        h = x
        logdet = 0.0
        for layer in (self.norm_layer, self.coupling, self.shuffle):
            h, ld = layer(h)
            logdet = logdet + ld
        return h, logdet

    def reverse(self, out):
        return self.forward(out, reverse=True)
class UnconditionalFlatDoubleCouplingGINFlowBlock(nn.Module):
    """GIN variant of the flat flow step: GINActNorm ->
    GINDoubleVectorCouplingBlock -> fixed channel shuffle."""

    def __init__(self, in_channels, hidden_dim, hidden_depth):
        super().__init__()
        self.norm_layer = GINActNorm(in_channels, logdet=True)
        self.coupling = GINDoubleVectorCouplingBlock(in_channels,
                                                      hidden_dim,
                                                      hidden_depth)
        self.shuffle = Shuffle(in_channels)

    def forward(self, x, reverse=False):
        if reverse:
            # Invert sub-layers in the opposite order.
            h = self.shuffle(x, reverse=True)
            h = self.coupling(h, reverse=True)
            return self.norm_layer(h, reverse=True)
        # Accumulate the log-determinant across the three sub-layers.
        h = x
        logdet = 0.0
        for layer in (self.norm_layer, self.coupling, self.shuffle):
            h, ld = layer(h)
            logdet = logdet + ld
        return h, logdet

    def reverse(self, out):
        return self.forward(out, reverse=True)
class UnconditionalFlatDoubleCouplingGIN2FlowBlock(nn.Module):
    """GIN2 variant of the flat flow step: GIN2ActNorm ->
    GIN2DoubleVectorCouplingBlock -> fixed channel shuffle."""

    def __init__(self, in_channels, hidden_dim, hidden_depth):
        super().__init__()
        self.norm_layer = GIN2ActNorm(in_channels, logdet=True)
        self.coupling = GIN2DoubleVectorCouplingBlock(in_channels,
                                                       hidden_dim,
                                                       hidden_depth)
        self.shuffle = Shuffle(in_channels)

    def forward(self, x, reverse=False):
        if reverse:
            # Invert sub-layers in the opposite order.
            h = self.shuffle(x, reverse=True)
            h = self.coupling(h, reverse=True)
            return self.norm_layer(h, reverse=True)
        # Accumulate the log-determinant across the three sub-layers.
        h = x
        logdet = 0.0
        for layer in (self.norm_layer, self.coupling, self.shuffle):
            h, ld = layer(h)
            logdet = logdet + ld
        return h, logdet

    def reverse(self, out):
        return self.forward(out, reverse=True)
class UnconditionalFlatDoubleCouplingNICEFlowBlock(nn.Module):
    """Additive (NICE) flat flow step: NICEDoubleVectorCouplingBlock ->
    fixed channel shuffle. Deliberately has no normalization layer."""

    def __init__(self, in_channels, hidden_dim, hidden_depth):
        super().__init__()
        self.coupling = NICEDoubleVectorCouplingBlock(in_channels,
                                                       hidden_dim,
                                                       hidden_depth)
        self.shuffle = Shuffle(in_channels)

    def forward(self, x, reverse=False):
        if reverse:
            # Invert sub-layers in the opposite order.
            h = self.shuffle(x, reverse=True)
            return self.coupling(h, reverse=True)
        # Accumulate the log-determinant across the two sub-layers.
        h = x
        logdet = 0.0
        for layer in (self.coupling, self.shuffle):
            h, ld = layer(h)
            logdet = logdet + ld
        return h, logdet

    def reverse(self, out):
        return self.forward(out, reverse=True)
class ConditionalFlatDoubleCouplingFlowBlock(nn.Module):
    """One conditional flow step: ActNorm -> invertible activation ->
    conditional coupling (fed xcond) -> fixed channel shuffle."""

    def __init__(self, in_channels, cond_channels, hidden_dim, hidden_depth, activation="lrelu"):
        super().__init__()
        activations = {"lrelu": InvLeakyRelu, "none": IgnoreLeakyRelu}
        self.norm_layer = ActNorm(in_channels, logdet=True)
        self.coupling = ConditionalDoubleVectorCouplingBlock(in_channels,
                                                             cond_channels,
                                                             hidden_dim,
                                                             hidden_depth)
        self.activation = activations[activation]()
        self.shuffle = Shuffle(in_channels)

    def forward(self, x, xcond, reverse=False):
        if reverse:
            # Invert sub-layers in the opposite order; the coupling also
            # needs the conditioning input when inverting.
            h = self.shuffle(x, reverse=True)
            h = self.coupling(h, xcond, reverse=True)
            h = self.activation(h, reverse=True)
            return self.norm_layer(h, reverse=True)
        # Accumulate the log-determinant across the four sub-layers.
        logdet = 0.0
        h, ld = self.norm_layer(x)
        logdet = logdet + ld
        h, ld = self.activation(h)
        logdet = logdet + ld
        h, ld = self.coupling(h, xcond)
        logdet = logdet + ld
        h, ld = self.shuffle(h)
        logdet = logdet + ld
        return h, logdet

    def reverse(self, out, xcond):
        return self.forward(out, xcond, reverse=True)
class Shuffle(nn.Module):
    """Fixed random channel permutation, drawn once at construction.

    Forward returns ``(x permuted, 0)`` -- a permutation has zero
    log-determinant -- and reverse applies the inverse permutation.
    """

    def __init__(self, in_channels, **kwargs):
        super(Shuffle, self).__init__()
        self.in_channels = in_channels
        idx = torch.randperm(in_channels)
        # Register plain index tensors as buffers (tracked by state_dict and
        # moved by .to()). Wrapping them in nn.Parameter, as before, was
        # unnecessary and misrepresents non-trainable indices as parameters.
        self.register_buffer('forward_shuffle_idx', idx)
        self.register_buffer('backward_shuffle_idx', torch.argsort(idx))

    def forward(self, x, reverse=False, conditioning=None):
        # ``conditioning`` is accepted for interface compatibility but unused.
        if not reverse:
            return x[:, self.forward_shuffle_idx, ...], 0
        else:
            return x[:, self.backward_shuffle_idx, ...]
class OrthogonalPermute(nn.Module):
    """For some orthogonal matrix O: O^(-1) = O^T. Mixes channels with a
    fixed random orthogonal 1x1 convolution; reverse applies O^T."""

    def __init__(self, in_channels, **kwargs):
        super(OrthogonalPermute, self).__init__()
        print('WARNING: OrthogonalPermute induces invertibility issues!?')
        self.in_channels = in_channels
        omatrix = torch.empty(in_channels, in_channels)
        nn.init.orthogonal_(omatrix)
        self.register_buffer('forward_orthogonal', omatrix)
        self.register_buffer('backward_orthogonal', omatrix.t())

    def forward(self, x, reverse=False):
        # 2D input is lifted to NCHW for the 1x1 conv and squeezed back after.
        was_2d = len(x.shape) == 2
        if was_2d:
            x = x.unsqueeze(2).unsqueeze(3)
        weight = self.backward_orthogonal if reverse else self.forward_orthogonal
        out = F.conv2d(x, weight.unsqueeze(2).unsqueeze(3))
        if was_2d:
            out = out.squeeze()
        if reverse:
            return out
        # Orthogonal transform: zero log-determinant contribution reported.
        return out, 0
class IgnoreLeakyRelu(nn.Module):
    """performs identity op. Drop-in replacement for an invertible
    activation; both directions pass the input through unchanged."""

    def __init__(self, alpha=0.9):
        # ``alpha`` is accepted for interface compatibility but unused.
        super().__init__()

    def forward(self, input, reverse=False):
        if reverse:
            return self.reverse(input)
        # Identity with zero log-determinant.
        return input, 0.0

    def reverse(self, input):
        return input
class InvLeakyRelu(nn.Module):
    """Invertible leaky ReLU: negative entries are multiplied by ``alpha``
    on the forward pass and divided by it on the reverse pass. The forward
    pass reports a log-determinant of 0.0 (not the true value)."""

    def __init__(self, alpha=0.9):
        super().__init__()
        self.alpha = alpha

    def _slopes(self, input):
        # Elementwise factor: 1 for entries >= 0, alpha for negative entries.
        return (input >= 0).to(input) + (input < 0).to(input) * self.alpha

    def forward(self, input, reverse=False):
        if reverse:
            return self.reverse(input)
        return input * self._slopes(input), 0.0

    def reverse(self, input):
        # Sign pattern is preserved by the forward map, so the same slope
        # mask computed on the output inverts it exactly.
        return input / self._slopes(input)
class InvParametricRelu(InvLeakyRelu):
    """InvLeakyRelu whose negative slope ``alpha`` is a learnable scalar."""

    def __init__(self, alpha=0.9):
        super().__init__()
        # Replace the float alpha set by the parent with a trainable tensor;
        # the inherited forward/reverse work unchanged on it.
        self.alpha = nn.Parameter(torch.tensor(alpha), requires_grad=True)
| 39.564202
| 160
| 0.539011
| 3,664
| 30,504
| 4.296943
| 0.052675
| 0.052719
| 0.034299
| 0.030869
| 0.839558
| 0.808244
| 0.797129
| 0.784489
| 0.768229
| 0.748539
| 0
| 0.015104
| 0.346709
| 30,504
| 770
| 161
| 39.615584
| 0.77494
| 0.028357
| 0
| 0.806354
| 0
| 0
| 0.006798
| 0
| 0
| 0
| 0
| 0.001299
| 0.019667
| 1
| 0.092284
| false
| 0
| 0.006051
| 0.018154
| 0.228442
| 0.001513
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6ce585a888e755257caf727b2b635a6a089b7033
| 2,207
|
py
|
Python
|
test/exploits/hashes/collisions/test_php5.py
|
drjerry/acsploit
|
fbe07fb0eb651e3c5fc27a0dbdfcd0ec4c674381
|
[
"BSD-3-Clause"
] | 107
|
2018-05-03T16:53:01.000Z
|
2022-02-23T14:47:20.000Z
|
test/exploits/hashes/collisions/test_php5.py
|
drjerry/acsploit
|
fbe07fb0eb651e3c5fc27a0dbdfcd0ec4c674381
|
[
"BSD-3-Clause"
] | 7
|
2019-04-28T00:41:35.000Z
|
2021-05-04T20:35:54.000Z
|
test/exploits/hashes/collisions/test_php5.py
|
drjerry/acsploit
|
fbe07fb0eb651e3c5fc27a0dbdfcd0ec4c674381
|
[
"BSD-3-Clause"
] | 16
|
2019-03-29T12:39:16.000Z
|
2021-03-03T11:09:45.000Z
|
from exploits.hashes.collisions import php5
from exploits.hashes.collisions import php5_common
from test.exploits.dummy_output import DummyOutput
from input.chars import CharGenerator
def test_run_small_collision_count():
    """A small request yields exactly n outputs, all hashing to the target."""
    sink = DummyOutput()
    wanted = 10
    table_size = 2 ** 32
    image = '42'
    php5.options['n_collisions'] = wanted
    php5.options['target_type'] = 'image'
    php5.options['target'] = image
    php5.options['hash_table_size'] = table_size
    php5.run(CharGenerator(), sink)
    assert sink.count() == wanted
    for candidate in sink:
        assert php5_common.php_hash(candidate, table_size) == int(image)
def test_run_larger_collision_count():
    """A larger request (20) also yields exactly n matching collisions."""
    sink = DummyOutput()
    wanted = 20
    table_size = 2 ** 32
    image = '42'
    php5.options['n_collisions'] = wanted
    php5.options['target_type'] = 'image'
    php5.options['target'] = image
    php5.options['hash_table_size'] = table_size
    php5.run(CharGenerator(), sink)
    assert sink.count() == wanted
    for candidate in sink:
        assert php5_common.php_hash(candidate, table_size) == int(image)
def test_preimage():
    """In preimage mode every output collides with the hash of the given string."""
    sink = DummyOutput()
    wanted = 10
    table_size = 2 ** 32
    preimage_target = 'hello world'
    php5.options['n_collisions'] = wanted
    php5.options['target_type'] = 'preimage'
    php5.options['target'] = preimage_target
    php5.options['hash_table_size'] = table_size
    php5.run(CharGenerator(), sink)
    expected_hash = php5_common.php_hash(preimage_target, table_size)
    assert sink.count() == wanted
    for candidate in sink:
        assert php5_common.php_hash(candidate, table_size) == expected_hash
def test_run_small_hash_table_size():
    """Collisions are still found when the hash table is tiny (size 100)."""
    sink = DummyOutput()
    wanted = 10
    table_size = 100
    image = '42'
    php5.options['n_collisions'] = wanted
    php5.options['target_type'] = 'image'
    php5.options['target'] = image
    php5.options['hash_table_size'] = table_size
    php5.run(CharGenerator(), sink)
    assert sink.count() == wanted
    for candidate in sink:
        assert php5_common.php_hash(candidate, table_size) == int(image)
| 33.439394
| 70
| 0.705936
| 294
| 2,207
| 5.017007
| 0.146259
| 0.109831
| 0.158644
| 0.057627
| 0.823051
| 0.823051
| 0.733559
| 0.733559
| 0.704407
| 0.704407
| 0
| 0.029412
| 0.183507
| 2,207
| 65
| 71
| 33.953846
| 0.789123
| 0
| 0
| 0.736842
| 0
| 0
| 0.09787
| 0
| 0
| 0
| 0
| 0
| 0.140351
| 1
| 0.070175
| false
| 0
| 0.070175
| 0
| 0.140351
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4c913c3894e61bdea270e4ac44fa2e0e31a13f7b
| 161
|
py
|
Python
|
app/core/admin.py
|
konradgalczynski07/django-rest-api
|
5dced82b45a1c5d591f2f07bbf1a3ab01e797997
|
[
"MIT"
] | 58
|
2019-08-11T04:48:54.000Z
|
2022-03-17T14:34:51.000Z
|
app/core/admin.py
|
konradgalczynski07/django-rest-api
|
5dced82b45a1c5d591f2f07bbf1a3ab01e797997
|
[
"MIT"
] | null | null | null |
app/core/admin.py
|
konradgalczynski07/django-rest-api
|
5dced82b45a1c5d591f2f07bbf1a3ab01e797997
|
[
"MIT"
] | 20
|
2020-01-09T17:30:42.000Z
|
2022-01-25T08:14:17.000Z
|
from django.contrib import admin
from core import models
# Expose the core models in the Django admin with the default ModelAdmin.
for model in (models.User, models.Post, models.Comment):
    admin.site.register(model)
| 20.125
| 35
| 0.819876
| 24
| 161
| 5.5
| 0.5
| 0.204545
| 0.386364
| 0.522727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.080745
| 161
| 7
| 36
| 23
| 0.891892
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
4cb8310b6d672d5a7f762748286ee08b9ac9bdda
| 6,410
|
py
|
Python
|
pepdb/core/migrations/0086_auto_20160419_0250.py
|
dchaplinsky/pep.org.ua
|
8633a65fb657d7f04dbdb12eb8ae705fa6be67e3
|
[
"MIT"
] | 7
|
2015-12-21T03:52:46.000Z
|
2020-07-24T19:17:23.000Z
|
pepdb/core/migrations/0086_auto_20160419_0250.py
|
dchaplinsky/pep.org.ua
|
8633a65fb657d7f04dbdb12eb8ae705fa6be67e3
|
[
"MIT"
] | 12
|
2016-03-05T18:11:05.000Z
|
2021-06-17T20:20:03.000Z
|
pepdb/core/migrations/0086_auto_20160419_0250.py
|
dchaplinsky/pep.org.ua
|
8633a65fb657d7f04dbdb12eb8ae705fa6be67e3
|
[
"MIT"
] | 4
|
2016-07-17T20:19:38.000Z
|
2021-03-23T12:47:20.000Z
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import redactor.fields
class Migration(migrations.Migration):
    """Auto-generated schema migration (makemigrations output).

    Adds English/Ukrainian (``_en``/``_uk``) translated variants of several
    ``company`` fields and a ``person.related_companies`` M2M. Verbose names
    and help texts are Ukrainian, stored as ``\\uXXXX`` escapes. Treat as
    generated code: do not hand-edit the field definitions.
    """

    dependencies = [
        ('core', '0085_auto_20160418_0327'),
    ]

    operations = [
        migrations.AddField(
            model_name='company',
            name='appt_en',
            field=models.CharField(max_length=50, null=True, verbose_name='\u2116 \u0431\u0443\u0434\u0438\u043d\u043a\u0443, \u043e\u0444\u0456\u0441\u0443', blank=True),
        ),
        migrations.AddField(
            model_name='company',
            name='appt_uk',
            field=models.CharField(max_length=50, null=True, verbose_name='\u2116 \u0431\u0443\u0434\u0438\u043d\u043a\u0443, \u043e\u0444\u0456\u0441\u0443', blank=True),
        ),
        migrations.AddField(
            model_name='company',
            name='bank_name_en',
            field=redactor.fields.RedactorField(null=True, verbose_name='\u0424\u0456\u043d\u0430\u043d\u0441\u043e\u0432\u0430 \u0456\u043d\u0444\u043e\u0440\u043c\u0430\u0446\u0456\u044f', blank=True),
        ),
        migrations.AddField(
            model_name='company',
            name='bank_name_uk',
            field=redactor.fields.RedactorField(null=True, verbose_name='\u0424\u0456\u043d\u0430\u043d\u0441\u043e\u0432\u0430 \u0456\u043d\u0444\u043e\u0440\u043c\u0430\u0446\u0456\u044f', blank=True),
        ),
        migrations.AddField(
            model_name='company',
            name='city_en',
            field=models.CharField(max_length=255, null=True, verbose_name='\u041c\u0456\u0441\u0442\u043e', blank=True),
        ),
        migrations.AddField(
            model_name='company',
            name='city_uk',
            field=models.CharField(max_length=255, null=True, verbose_name='\u041c\u0456\u0441\u0442\u043e', blank=True),
        ),
        migrations.AddField(
            model_name='company',
            name='other_founders_en',
            field=redactor.fields.RedactorField(help_text='\u0427\u0435\u0440\u0435\u0437 \u043a\u043e\u043c\u0443, \u043d\u0435 PEP', null=True, verbose_name='\u0406\u043d\u0448\u0456 \u0437\u0430\u0441\u043d\u043e\u0432\u043d\u0438\u043a\u0438', blank=True),
        ),
        migrations.AddField(
            model_name='company',
            name='other_founders_uk',
            field=redactor.fields.RedactorField(help_text='\u0427\u0435\u0440\u0435\u0437 \u043a\u043e\u043c\u0443, \u043d\u0435 PEP', null=True, verbose_name='\u0406\u043d\u0448\u0456 \u0437\u0430\u0441\u043d\u043e\u0432\u043d\u0438\u043a\u0438', blank=True),
        ),
        migrations.AddField(
            model_name='company',
            name='other_managers_en',
            field=redactor.fields.RedactorField(help_text='\u0427\u0435\u0440\u0435\u0437 \u043a\u043e\u043c\u0443, \u043d\u0435 PEP', null=True, verbose_name='\u0406\u043d\u0448\u0456 \u043a\u0435\u0440\u0443\u044e\u0447\u0456', blank=True),
        ),
        migrations.AddField(
            model_name='company',
            name='other_managers_uk',
            field=redactor.fields.RedactorField(help_text='\u0427\u0435\u0440\u0435\u0437 \u043a\u043e\u043c\u0443, \u043d\u0435 PEP', null=True, verbose_name='\u0406\u043d\u0448\u0456 \u043a\u0435\u0440\u0443\u044e\u0447\u0456', blank=True),
        ),
        migrations.AddField(
            model_name='company',
            name='other_owners_en',
            field=redactor.fields.RedactorField(help_text='\u0427\u0435\u0440\u0435\u0437 \u043a\u043e\u043c\u0443, \u043d\u0435 PEP', null=True, verbose_name='\u0406\u043d\u0448\u0456 \u0432\u043b\u0430\u0441\u043d\u0438\u043a\u0438', blank=True),
        ),
        migrations.AddField(
            model_name='company',
            name='other_owners_uk',
            field=redactor.fields.RedactorField(help_text='\u0427\u0435\u0440\u0435\u0437 \u043a\u043e\u043c\u0443, \u043d\u0435 PEP', null=True, verbose_name='\u0406\u043d\u0448\u0456 \u0432\u043b\u0430\u0441\u043d\u0438\u043a\u0438', blank=True),
        ),
        migrations.AddField(
            model_name='company',
            name='other_recipient_en',
            field=models.CharField(help_text='\u042f\u043a\u0449\u043e \u043d\u0435 \u0454 PEP\u043e\u043c', max_length=200, null=True, verbose_name='\u0411\u0435\u043d\u0435\u0444\u0456\u0446\u0456\u0430\u0440\u0456\u0439', blank=True),
        ),
        migrations.AddField(
            model_name='company',
            name='other_recipient_uk',
            field=models.CharField(help_text='\u042f\u043a\u0449\u043e \u043d\u0435 \u0454 PEP\u043e\u043c', max_length=200, null=True, verbose_name='\u0411\u0435\u043d\u0435\u0444\u0456\u0446\u0456\u0430\u0440\u0456\u0439', blank=True),
        ),
        migrations.AddField(
            model_name='company',
            name='sanctions_en',
            field=redactor.fields.RedactorField(null=True, verbose_name='\u0421\u0430\u043d\u043a\u0446\u0456\u0457', blank=True),
        ),
        migrations.AddField(
            model_name='company',
            name='sanctions_uk',
            field=redactor.fields.RedactorField(null=True, verbose_name='\u0421\u0430\u043d\u043a\u0446\u0456\u0457', blank=True),
        ),
        migrations.AddField(
            model_name='company',
            name='street_en',
            field=models.CharField(max_length=100, null=True, verbose_name='\u0412\u0443\u043b\u0438\u0446\u044f', blank=True),
        ),
        migrations.AddField(
            model_name='company',
            name='street_uk',
            field=models.CharField(max_length=100, null=True, verbose_name='\u0412\u0443\u043b\u0438\u0446\u044f', blank=True),
        ),
        migrations.AddField(
            model_name='company',
            name='wiki_en',
            field=redactor.fields.RedactorField(null=True, verbose_name='\u0412\u0456\u043a\u0456-\u0441\u0442\u0430\u0442\u0442\u044f', blank=True),
        ),
        migrations.AddField(
            model_name='company',
            name='wiki_uk',
            field=redactor.fields.RedactorField(null=True, verbose_name='\u0412\u0456\u043a\u0456-\u0441\u0442\u0430\u0442\u0442\u044f', blank=True),
        ),
        migrations.AddField(
            model_name='person',
            name='related_companies',
            field=models.ManyToManyField(to='core.Company', through='core.Person2Company'),
        ),
    ]
| 52.975207
| 260
| 0.648206
| 757
| 6,410
| 5.361955
| 0.130779
| 0.093126
| 0.118995
| 0.13969
| 0.933974
| 0.933974
| 0.931017
| 0.919685
| 0.919685
| 0.907366
| 0
| 0.227138
| 0.21014
| 6,410
| 120
| 261
| 53.416667
| 0.574561
| 0.003276
| 0
| 0.719298
| 0
| 0.157895
| 0.367152
| 0.266322
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.026316
| 0
| 0.052632
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4cfe23c3ce31345e93907a07a32656e1678e6663
| 8,091
|
py
|
Python
|
tests/layers/test_attention_layers.py
|
pmichel31415/dynet-nn
|
7e780c4dcf928cdff5f2e52210409d2775ca7796
|
[
"MIT"
] | 1
|
2019-09-04T13:19:47.000Z
|
2019-09-04T13:19:47.000Z
|
tests/layers/test_attention_layers.py
|
pmichel31415/dynn
|
7e780c4dcf928cdff5f2e52210409d2775ca7796
|
[
"MIT"
] | null | null | null |
tests/layers/test_attention_layers.py
|
pmichel31415/dynn
|
7e780c4dcf928cdff5f2e52210409d2775ca7796
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import unittest
from unittest import TestCase
import numpy as np
import dynet as dy
from dynn.layers import attention_layers
class TestMLPAttention(TestCase):
    """Tests for attention_layers.MLPAttention (dynn)."""

    def setUp(self):
        self.pc = dy.ParameterCollection()
        # L = #keys/values; dk/dq/dv = key/query/value dims; dh = scorer hidden dim.
        self.L = 6
        self.dk = 3
        self.dq = 4
        self.dv = 5
        self.dh = 2
        self.bsz = self.L
        # Batch element b has length b+1; mask holds -inf at padded positions
        # (added to the attention logits), 0 elsewhere.
        self.lengths = list(range(1, self.bsz+1))
        self.mask = [[-np.inf if i >= length else 0 for length in self.lengths]
                     for i in range(self.L)]
        self.dropout = 0.1

    def test_attend(self):
        # Create the attention layer
        attend = attention_layers.MLPAttention(
            self.pc,
            self.dq,
            self.dk,
            self.dh,
            dropout=self.dropout
        )
        # Initialize computation graph
        dy.renew_cg()
        # Create inputs
        query = dy.random_uniform(self.dq, -1, 1, batch_size=self.bsz)
        keys = dy.random_uniform((self.dk, self.L), -1, 1, batch_size=self.bsz)
        values = dy.random_uniform(
            (self.dv, self.L), -1, 1, batch_size=self.bsz)
        mask = dy.inputTensor(self.mask, batched=True)
        # Initialize layer
        attend.init(test=False, update=True)
        # Run the attention layer
        y, scores = attend(query, keys, values, mask=mask)
        # Try forward/backward
        z = dy.mean_batches(dy.sum_elems(y))
        z.forward()
        z.backward(full=True)
        # Check dimension: one dv-vector per batch element
        self.assertTupleEqual(y.dim()[0], (self.dv,))
        self.assertEqual(y.dim()[1], self.bsz)
        # Check score values: attention weights sum to one per batch element
        score_sums = dy.sum_elems(scores).npvalue()
        self.assertTrue(np.allclose(score_sums, np.ones(self.bsz)))
        # Check masking: masked value positions must receive zero gradient
        gradients = values.gradient()
        for b, length in enumerate(self.lengths):
            grad_elem = gradients[:, :, b].T
            for pos, g_val in enumerate(grad_elem):
                is_masked = pos >= length
                zero_grad = np.allclose(g_val, 0)
                self.assertEqual(is_masked, zero_grad)
class TestBilinearAttention(TestCase):
    """Tests for attention_layers.BilinearAttention (dot-product and general
    bilinear variants)."""

    def setUp(self):
        self.pc = dy.ParameterCollection()
        # L = #keys/values; l_ = #queries; dk/dq/dv = key/query/value dims.
        self.L = 6
        self.l_ = 7
        self.dk = 3
        self.dq = 4
        self.dv = 5
        self.dh = 2
        self.bsz = self.L
        # Batch element b has length b+1; mask holds -inf at padded positions.
        self.lengths = list(range(1, self.bsz+1))
        self.mask = [[-np.inf if i >= length else 0 for length in self.lengths]
                     for i in range(self.L)]
        self.dropout = 0.1

    def test_attend_dot(self):
        # Dot-product variant: query and key dims must match, hence (dq, dq)
        attend = attention_layers.BilinearAttention(
            self.pc,
            self.dq,
            self.dq,
            dot_product=True,
            dropout=self.dropout
        )
        # Initialize computation graph
        dy.renew_cg()
        # Create inputs
        query = dy.random_uniform(
            (self.dq, self.l_), - 1, 1, batch_size=self.bsz)
        keys = dy.random_uniform((self.dq, self.L), -1, 1, batch_size=self.bsz)
        values = dy.random_uniform(
            (self.dv, self.L), -1, 1, batch_size=self.bsz)
        mask = dy.inputTensor(self.mask, batched=True)
        # Initialize layer
        attend.init(test=False, update=True)
        # Run the attention layer
        y, scores = attend(query, keys, values, mask=mask)
        # Try forward/backward
        z = dy.mean_batches(dy.sum_elems(y))
        z.forward()
        z.backward(full=True)
        # Check dimension: one dv-vector per query position
        self.assertTupleEqual(y.dim()[0], (self.dv, self.l_))
        self.assertEqual(y.dim()[1], self.bsz)
        # Check score values: weights sum to one over key positions
        score_sums = dy.sum_dim(scores, d=[0]).npvalue()
        self.assertTrue(np.allclose(score_sums, np.ones((self.l_, self.bsz))))
        # Check masking: masked value positions must receive zero gradient
        gradients = values.gradient()
        for b, length in enumerate(self.lengths):
            grad_elem = gradients[:, :, b].T
            for pos, g_val in enumerate(grad_elem):
                is_masked = pos >= length
                zero_grad = np.allclose(g_val, 0)
                self.assertEqual(is_masked, zero_grad)

    def test_attend_bilinear(self):
        # General bilinear variant: supports differing query/key dims (dq, dk)
        attend = attention_layers.BilinearAttention(
            self.pc,
            self.dq,
            self.dk,
            dot_product=False,
            dropout=self.dropout
        )
        # Initialize computation graph
        dy.renew_cg()
        # Create inputs
        query = dy.random_uniform(
            (self.dq, self.l_), - 1, 1, batch_size=self.bsz)
        keys = dy.random_uniform((self.dk, self.L), -1, 1, batch_size=self.bsz)
        values = dy.random_uniform(
            (self.dv, self.L), -1, 1, batch_size=self.bsz)
        mask = dy.inputTensor(self.mask, batched=True)
        # Initialize layer
        attend.init(test=False, update=True)
        # Run the attention layer
        y, scores = attend(query, keys, values, mask=mask)
        # Try forward/backward
        z = dy.mean_batches(dy.sum_elems(y))
        z.forward()
        z.backward(full=True)
        # Check dimension
        self.assertTupleEqual(y.dim()[0], (self.dv, self.l_))
        self.assertEqual(y.dim()[1], self.bsz)
        # Check score values
        score_sums = dy.sum_dim(scores, d=[0]).npvalue()
        self.assertTrue(np.allclose(score_sums, np.ones((self.l_, self.bsz))))
        # Check masking
        gradients = values.gradient()
        for b, length in enumerate(self.lengths):
            grad_elem = gradients[:, :, b].T
            for pos, g_val in enumerate(grad_elem):
                is_masked = pos >= length
                zero_grad = np.allclose(g_val, 0)
                self.assertEqual(is_masked, zero_grad)
class TestMultiHeadAttention(TestCase):
    """Tests for attention_layers.MultiHeadAttention."""

    def setUp(self):
        self.pc = dy.ParameterCollection()
        # dk/dq/dv = key/query/value dims.
        self.dk = 3
        self.dq = 4
        self.dv = 5
        # Attention layer values: dh = total hidden dim, nh = #heads, do = output dim.
        self.dh = 8
        self.nh = 4
        self.do = 2
        # Lengths: L = #keys/values, l_ = #queries.
        self.L = 6
        self.l_ = 7
        self.bsz = self.L
        # Batch element b has length b+1; mask holds -inf at padded positions.
        self.lengths = list(range(1, self.bsz+1))
        self.mask = [[-np.inf if i >= length else 0 for length in self.lengths]
                     for i in range(self.L)]
        self.dropout = 0.1

    def test_attend(self):
        # Create the multi-head attention layer
        attend = attention_layers.MultiHeadAttention(
            self.pc,
            self.nh,
            self.dq,
            self.dk,
            self.dv,
            self.dh,
            self.do,
            dropout=self.dropout
        )
        # Initialize computation graph
        dy.renew_cg()
        # Create inputs
        queries = dy.random_uniform(
            (self.dq, self.l_), - 1, 1, batch_size=self.bsz)
        keys = dy.random_uniform((self.dk, self.L), -1, 1, batch_size=self.bsz)
        values = dy.random_uniform(
            (self.dv, self.L), -1, 1, batch_size=self.bsz)
        mask = dy.inputTensor(self.mask, batched=True)
        # Initialize layer
        attend.init(test=False, update=True)
        # Run the attention layer
        y, weights = attend(queries, keys, values, mask=mask)
        # Try forward/backward
        z = dy.mean_batches(dy.sum_elems(y))
        z.forward()
        z.backward(full=True)
        # Check dimension: one do-vector per query position
        self.assertTupleEqual(y.dim()[0], (self.do, self.l_))
        self.assertEqual(y.dim()[1], self.bsz)
        # Check weights values: each head's weights sum to one over positions
        for head in range(self.nh):
            weights_sums = dy.sum_dim(weights[head], d=[0]).npvalue()
            self.assertTrue(np.allclose(
                weights_sums, np.ones((self.l_, self.bsz))))
        # Check masking: masked value positions must receive zero gradient
        gradients = values.gradient()
        for b, length in enumerate(self.lengths):
            grad_elem = gradients[:, :, b].T
            for pos, g_val in enumerate(grad_elem):
                is_masked = pos >= length
                zero_grad = np.allclose(g_val, 0)
                self.assertEqual(is_masked, zero_grad)
if __name__ == '__main__':
    # Allow running this test module directly, outside a pytest runner.
    unittest.main()
| 34.283898
| 79
| 0.556791
| 1,033
| 8,091
| 4.252662
| 0.124879
| 0.031869
| 0.024585
| 0.051901
| 0.891191
| 0.886638
| 0.882085
| 0.871386
| 0.871386
| 0.855907
| 0
| 0.013578
| 0.326412
| 8,091
| 235
| 80
| 34.429787
| 0.792477
| 0.087752
| 0
| 0.786517
| 0
| 0
| 0.00109
| 0
| 0
| 0
| 0
| 0
| 0.089888
| 1
| 0.039326
| false
| 0
| 0.02809
| 0
| 0.08427
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2c725032638852b67e438d209be668b0e9b927a8
| 46
|
py
|
Python
|
models/__init__.py
|
tczhangzhi/PyTorch-SAGCN
|
d302e2a837f9b05e478fb2daae1ddd2a4988449f
|
[
"MIT"
] | 1
|
2021-11-26T02:33:26.000Z
|
2021-11-26T02:33:26.000Z
|
models/__init__.py
|
tczhangzhi/PyTorch-SAGCN
|
d302e2a837f9b05e478fb2daae1ddd2a4988449f
|
[
"MIT"
] | null | null | null |
models/__init__.py
|
tczhangzhi/PyTorch-SAGCN
|
d302e2a837f9b05e478fb2daae1ddd2a4988449f
|
[
"MIT"
] | null | null | null |
from .sagcn import *
from .sagcn_mean import *
| 23
| 25
| 0.76087
| 7
| 46
| 4.857143
| 0.571429
| 0.529412
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.152174
| 46
| 2
| 25
| 23
| 0.871795
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2cb79cb5f003e9d69a8cd3f163abba17d6413644
| 172,158
|
py
|
Python
|
lte/protos/oai/nas_state_pb2.py
|
aweimeow/enodebd
|
e1cd20693153e6b85e5d1bf9d21af2501c358601
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
lte/protos/oai/nas_state_pb2.py
|
aweimeow/enodebd
|
e1cd20693153e6b85e5d1bf9d21af2501c358601
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
lte/protos/oai/nas_state_pb2.py
|
aweimeow/enodebd
|
e1cd20693153e6b85e5d1bf9d21af2501c358601
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: lte/protos/oai/nas_state.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from lte.protos.oai import common_types_pb2 as lte_dot_protos_dot_oai_dot_common__types__pb2
from lte.protos.oai import spgw_state_pb2 as lte_dot_protos_dot_oai_dot_spgw__state__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='lte/protos/oai/nas_state.proto',
package='magma.lte.oai',
syntax='proto3',
serialized_options=b'Z\035magma/lte/cloud/go/protos/oai',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x1elte/protos/oai/nas_state.proto\x12\rmagma.lte.oai\x1a!lte/protos/oai/common_types.proto\x1a\x1flte/protos/oai/spgw_state.proto\" \n\x05Timer\x12\n\n\x02id\x18\x01 \x01(\x04\x12\x0b\n\x03sec\x18\x02 \x01(\x04\"2\n\rIdentityTuple\x12\r\n\x05value\x18\x01 \x01(\x0c\x12\x12\n\nnum_digits\x18\x02 \x01(\r\"\xc3\x01\n\x0ePartialTaiList\x12\x14\n\x0ctype_of_list\x18\x01 \x01(\r\x12\x1a\n\x12number_of_elements\x18\x02 \x01(\r\x12)\n\rtai_many_plmn\x18\x03 \x03(\x0b\x32\x12.magma.lte.oai.Tai\x12\x39\n\x1dtai_one_plmn_consecutive_tacs\x18\x04 \x01(\x0b\x32\x12.magma.lte.oai.Tai\x12\x0c\n\x04plmn\x18\x05 \x01(\x0c\x12\x0b\n\x03tac\x18\x06 \x03(\r\"Z\n\x07TaiList\x12\x15\n\rnumberoflists\x18\x01 \x01(\r\x12\x38\n\x11partial_tai_lists\x18\x02 \x03(\x0b\x32\x1d.magma.lte.oai.PartialTaiList\"#\n\x03Tai\x12\x0f\n\x07mcc_mnc\x18\x01 \x01(\x0c\x12\x0b\n\x03tac\x18\x02 \x01(\r\"-\n\x0bNasBaseProc\x12\x10\n\x08nas_puid\x18\x01 \x01(\x04\x12\x0c\n\x04type\x18\x02 \x01(\r\"i\n\nNasEmmProc\x12-\n\tbase_proc\x18\x01 \x01(\x0b\x32\x1a.magma.lte.oai.NasBaseProc\x12\x0c\n\x04type\x18\x02 \x01(\r\x12\x1e\n\x16previous_emm_fsm_state\x18\x03 \x01(\r\"\xab\x01\n\x0c\x41uthInfoProc\x12\x14\n\x0crequest_sent\x18\x01 \x01(\x08\x12)\n\x06vector\x18\x02 \x03(\x0b\x32\x19.magma.lte.oai.AuthVector\x12\x11\n\tnas_cause\x18\x03 \x01(\r\x12\r\n\x05ue_id\x18\x04 \x01(\r\x12\x0e\n\x06resync\x18\x05 \x01(\x08\x12(\n\ttimer_s6a\x18\xc8\x01 \x01(\x0b\x32\x14.magma.lte.oai.Timer\"\x81\x01\n\tNasCnProc\x12-\n\tbase_proc\x18\x01 \x01(\x0b\x32\x1a.magma.lte.oai.NasBaseProc\x12\x35\n\x0e\x61uth_info_proc\x18\x02 \x01(\x0b\x32\x1b.magma.lte.oai.AuthInfoProcH\x00\x42\x0e\n\x0cMessageTypes\"\x15\n\x13MobileStaClassmark2\"\x11\n\x0fVoicePreference\"\x9f\x01\n\x12NasMsgDecodeStatus\x12#\n\x1bintegrity_protected_message\x18\x01 \x01(\r\x12\x18\n\x10\x63iphered_message\x18\x02 \x01(\r\x12\x13\n\x0bmac_matched\x18\x03 \x01(\r\x12\"\n\x1asecurity_context_available\x18\x04 
\x01(\r\x12\x11\n\temm_cause\x18\x05 \x01(\r\"a\n\x0c\x44rxParameter\x12\x18\n\x10splitpgcyclecode\x18\x01 \x01(\r\x12\r\n\x05\x63ndrx\x18\x02 \x01(\r\x12\x13\n\x0bsplitonccch\x18\x03 \x01(\r\x12\x13\n\x0bnondrxtimer\x18\x04 \x01(\r\"\x93\x06\n\x10\x41ttachRequestIes\x12\x12\n\nis_initial\x18\x01 \x01(\x08\x12\x0c\n\x04type\x18\x02 \x01(\r\x12\x1e\n\x16\x61\x64\x64itional_update_type\x18\x03 \x01(\r\x12\x14\n\x0cis_native_sc\x18\x04 \x01(\x08\x12\x0b\n\x03ksi\x18\x05 \x01(\r\x12\x16\n\x0eis_native_guti\x18\x06 \x01(\x08\x12!\n\x04guti\x18\x07 \x01(\x0b\x32\x13.magma.lte.oai.Guti\x12*\n\x04imsi\x18\x08 \x01(\x0b\x32\x1c.magma.lte.oai.IdentityTuple\x12*\n\x04imei\x18\t \x01(\x0b\x32\x1c.magma.lte.oai.IdentityTuple\x12,\n\x10last_visited_tai\x18\n \x01(\x0b\x32\x12.magma.lte.oai.Tai\x12&\n\norigin_tai\x18\x0b \x01(\x0b\x32\x12.magma.lte.oai.Tai\x12(\n\x0borigin_ecgi\x18\x0c \x01(\x0b\x32\x13.magma.lte.oai.Ecgi\x12<\n\x10ue_nw_capability\x18\r \x01(\x0b\x32\".magma.lte.oai.UeNetworkCapability\x12\x32\n\rdrx_parameter\x18\x0e \x01(\x0b\x32\x1b.magma.lte.oai.DrxParameter\x12\x0f\n\x07\x65sm_msg\x18\x0f \x01(\x0c\x12\x38\n\rdecode_status\x18\x10 \x01(\x0b\x32!.magma.lte.oai.NasMsgDecodeStatus\x12\x36\n\nclassmark2\x18\x11 \x01(\x0b\x32\".magma.lte.oai.MobileStaClassmark2\x12\x38\n\x10voice_preference\x18\x12 \x01(\x0b\x32\x1e.magma.lte.oai.VoicePreference\x12X\n!ue_additional_security_capability\x18\x13 \x01(\x0b\x32-.magma.lte.oai.UeAdditionalSecurityCapability\"\xa1\x02\n\nAttachProc\x12\x1a\n\x12\x61ttach_accept_sent\x18\x01 \x01(\r\x12\x1a\n\x12\x61ttach_reject_sent\x18\x02 \x01(\x08\x12 \n\x18\x61ttach_complete_received\x18\x03 \x01(\x08\x12!\n\x04guti\x18\x04 \x01(\x0b\x32\x13.magma.lte.oai.Guti\x12\x13\n\x0b\x65sm_msg_out\x18\x05 \x01(\x0c\x12,\n\x03ies\x18\x06 \x01(\x0b\x32\x1f.magma.lte.oai.AttachRequestIes\x12\r\n\x05ue_id\x18\x07 \x01(\r\x12\x0b\n\x03ksi\x18\x08 \x01(\r\x12\x11\n\temm_cause\x18\t \x01(\r\x12$\n\x05t3450\x18\xc8\x01 
\x01(\x0b\x32\x14.magma.lte.oai.Timer\"\x8c\x02\n\x10\x44\x65tachRequestIes\x12\x0c\n\x04type\x18\x01 \x01(\r\x12\x12\n\nswitch_off\x18\x02 \x01(\x08\x12\x14\n\x0cis_native_sc\x18\x03 \x01(\x08\x12\x0b\n\x03ksi\x18\x04 \x01(\r\x12!\n\x04guti\x18\x05 \x01(\x0b\x32\x13.magma.lte.oai.Guti\x12*\n\x04imsi\x18\x06 \x01(\x0b\x32\x1c.magma.lte.oai.IdentityTuple\x12*\n\x04imei\x18\x07 \x01(\x0b\x32\x1c.magma.lte.oai.IdentityTuple\x12\x38\n\rdecode_status\x18\x08 \x01(\x0b\x32!.magma.lte.oai.NasMsgDecodeStatus\"\x0f\n\rTauRequestIes\"\x0c\n\nNasTauProc\"\x9c\x02\n\x08\x41uthProc\x12\x1c\n\x14retransmission_count\x18\x01 \x01(\r\x12\x17\n\x0fsync_fail_count\x18\x02 \x01(\r\x12\x16\n\x0emac_fail_count\x18\x03 \x01(\r\x12\r\n\x05ue_id\x18\x04 \x01(\r\x12\x1a\n\x12is_cause_is_attach\x18\x05 \x01(\x08\x12\x0b\n\x03ksi\x18\x06 \x01(\r\x12\x0c\n\x04rand\x18\x07 \x01(\x0c\x12\x0c\n\x04\x61utn\x18\x08 \x01(\x0c\x12\x34\n\x0eunchecked_imsi\x18\t \x01(\x0b\x32\x1c.magma.lte.oai.IdentityTuple\x12\x11\n\temm_cause\x18\n \x01(\r\x12$\n\x05T3460\x18\xc8\x01 \x01(\x0b\x32\x14.magma.lte.oai.Timer\"\xbc\x03\n\x07SmcProc\x12\r\n\x05ue_id\x18\x01 \x01(\r\x12\x1c\n\x14retransmission_count\x18\x02 \x01(\r\x12\x0b\n\x03ksi\x18\x03 \x01(\r\x12\x0b\n\x03\x65\x65\x61\x18\x04 \x01(\r\x12\x0b\n\x03\x65ia\x18\x05 \x01(\r\x12\x0c\n\x04ucs2\x18\x06 \x01(\r\x12\x0b\n\x03uea\x18\x07 \x01(\r\x12\x0b\n\x03uia\x18\x08 \x01(\r\x12\x0b\n\x03gea\x18\t \x01(\r\x12\x14\n\x0cumts_present\x18\n \x01(\x08\x12\x14\n\x0cgprs_present\x18\x0b \x01(\x08\x12\x14\n\x0cselected_eea\x18\x0c \x01(\r\x12\x14\n\x0cselected_eia\x18\r \x01(\r\x12\x1a\n\x12saved_selected_eea\x18\x0e \x01(\r\x12\x1a\n\x12saved_selected_eia\x18\x0f \x01(\r\x12\x12\n\nsaved_eksi\x18\x10 \x01(\r\x12\x16\n\x0esaved_overflow\x18\x11 \x01(\r\x12\x15\n\rsaved_seq_num\x18\x12 \x01(\r\x12\x15\n\rsaved_sc_type\x18\x13 \x01(\r\x12\x16\n\x0enotify_failure\x18\x14 \x01(\x08\x12\x0e\n\x06is_new\x18\x15 \x01(\x08\x12\x16\n\x0eimeisv_request\x18\x16 
\x01(\x08\"\x95\x02\n\x12NasEmmProcWithType\x12+\n\x08\x65mm_proc\x18\x01 \x01(\x0b\x32\x19.magma.lte.oai.NasEmmProc\x12\x30\n\x0b\x61ttach_proc\x18\x02 \x01(\x0b\x32\x19.magma.lte.oai.AttachProcH\x00\x12\x36\n\x0b\x64\x65tach_proc\x18\x03 \x01(\x0b\x32\x1f.magma.lte.oai.DetachRequestIesH\x00\x12*\n\x08smc_proc\x18\x04 \x01(\x0b\x32\x16.magma.lte.oai.SmcProcH\x00\x12,\n\tauth_proc\x18\x05 \x01(\x0b\x32\x17.magma.lte.oai.AuthProcH\x00\x42\x0e\n\x0cMessageTypes\"^\n\x0fNasProcMessSign\x12\x0c\n\x04puid\x18\x01 \x01(\x04\x12\x0e\n\x06\x64igest\x18\x02 \x01(\x0c\x12\x15\n\rdigest_length\x18\x03 \x01(\x04\x12\x16\n\x0enas_msg_length\x18\x04 \x01(\x04\"\xd8\x02\n\rEmmProcedures\x12<\n\x11\x65mm_specific_proc\x18\x01 \x01(\x0b\x32!.magma.lte.oai.NasEmmProcWithType\x12:\n\x0f\x65mm_common_proc\x18\x02 \x03(\x0b\x32!.magma.lte.oai.NasEmmProcWithType\x12)\n\x07\x63n_proc\x18\x03 \x03(\x0b\x32\x18.magma.lte.oai.NasCnProc\x12<\n\x11\x65mm_con_mngt_proc\x18\x04 \x01(\x0b\x32!.magma.lte.oai.NasEmmProcWithType\x12(\n nas_proc_mess_sign_next_location\x18\x05 \x01(\r\x12:\n\x12nas_proc_mess_sign\x18\x06 \x03(\x0b\x32\x1e.magma.lte.oai.NasProcMessSign\"!\n\rEmmCommonData\x12\x10\n\x07pointer\x18\xc8\x01 \x01(\x04\"\xd9\x06\n\x12\x45mmSecurityContext\x12\x0f\n\x07sc_type\x18\x01 \x01(\r\x12\x0c\n\x04\x65ksi\x18\x02 \x01(\r\x12\x14\n\x0cvector_index\x18\x03 \x01(\r\x12\x10\n\x08knas_enc\x18\x04 \x01(\x0c\x12\x10\n\x08knas_int\x18\x05 \x01(\x0c\x12\x39\n\x08\x64l_count\x18\x06 \x01(\x0b\x32\'.magma.lte.oai.EmmSecurityContext.Count\x12\x39\n\x08ul_count\x18\x07 \x01(\x0b\x32\'.magma.lte.oai.EmmSecurityContext.Count\x12>\n\rkenb_ul_count\x18\x08 \x01(\x0b\x32\'.magma.lte.oai.EmmSecurityContext.Count\x12@\n\ncapability\x18\t \x01(\x0b\x32,.magma.lte.oai.EmmSecurityContext.Capability\x12L\n\x0eselected_algos\x18\n \x01(\x0b\x32\x34.magma.lte.oai.EmmSecurityContext.SelectedAlgorithms\x12\x11\n\tactivated\x18\x0b \x01(\r\x12\x18\n\x10\x64irection_encode\x18\x0c 
\x01(\r\x12\x18\n\x10\x64irection_decode\x18\r \x01(\r\x12\x10\n\x08next_hop\x18\x0e \x01(\x0c\x12\x1f\n\x17next_hop_chaining_count\x18\x0f \x01(\r\x1a\x39\n\x05\x43ount\x12\r\n\x05spare\x18\x01 \x01(\r\x12\x10\n\x08overflow\x18\x02 \x01(\r\x12\x0f\n\x07seq_num\x18\x03 \x01(\r\x1a\xb1\x01\n\nCapability\x12\x16\n\x0e\x65ps_encryption\x18\x01 \x01(\r\x12\x15\n\reps_integirty\x18\x02 \x01(\r\x12\x17\n\x0fumts_encryption\x18\x03 \x01(\r\x12\x16\n\x0eumts_integirty\x18\x04 \x01(\r\x12\x17\n\x0fgprs_encryption\x18\x05 \x01(\r\x12\x14\n\x0cumts_present\x18\x06 \x01(\x08\x12\x14\n\x0cgprs_present\x18\x07 \x01(\x08\x1a;\n\x12SelectedAlgorithms\x12\x12\n\nencryption\x18\x01 \x01(\r\x12\x11\n\tintegrity\x18\x02 \x01(\r\"E\n\nAuthVector\x12\r\n\x05kasme\x18\x01 \x01(\x0c\x12\x0c\n\x04rand\x18\x02 \x01(\x0c\x12\x0c\n\x04\x61utn\x18\x03 \x01(\x0c\x12\x0c\n\x04xres\x18\x04 \x01(\x0c\"\xac\x04\n\x13UeNetworkCapability\x12\x0b\n\x03\x65\x65\x61\x18\x01 \x01(\r\x12\x0b\n\x03\x65ia\x18\x02 \x01(\r\x12\x0b\n\x03uea\x18\x03 \x01(\r\x12\x0c\n\x04ucs2\x18\x04 \x01(\r\x12\x0b\n\x03uia\x18\x05 \x01(\r\x12\x11\n\x05spare\x18\x06 \x01(\rB\x02\x18\x01\x12\x0c\n\x04\x63sfb\x18\x07 \x01(\r\x12\x0b\n\x03lpp\x18\x08 \x01(\r\x12\x0b\n\x03lcs\x18\t \x01(\r\x12\r\n\x05srvcc\x18\n \x01(\r\x12\n\n\x02nf\x18\x0b \x01(\r\x12\x0f\n\x07prosedd\x18\x0c \x01(\r\x12\r\n\x05prose\x18\r \x01(\r\x12\x0f\n\x07h245ash\x18\x0e \x01(\r\x12\x0c\n\x04\x65pco\x18\x0f \x01(\r\x12\x10\n\x08hccpciot\x18\x10 \x01(\r\x12\x10\n\x08\x65rwfopdn\x18\x11 \x01(\r\x12\x0f\n\x07s1udata\x18\x12 \x01(\r\x12\x0e\n\x06upciot\x18\x13 \x01(\r\x12\x14\n\x0cumts_present\x18\x14 \x01(\x08\x12\x18\n\x0cmisc_present\x18\x15 \x01(\x08\x42\x02\x18\x01\x12\x0e\n\x06\x63pciot\x18\x16 \x01(\r\x12\x12\n\nproserelay\x18\x17 \x01(\r\x12\x0f\n\x07prosedc\x18\x18 \x01(\r\x12\x0e\n\x06\x62\x65\x61rer\x18\x19 \x01(\r\x12\x0b\n\x03sgc\x18\x1a \x01(\r\x12\r\n\x05n1mod\x18\x1b \x01(\r\x12\x0c\n\x04\x64\x63nr\x18\x1c \x01(\r\x12\x11\n\tcpbackoff\x18\x1d 
\x01(\r\x12\x12\n\nrestrictec\x18\x1e \x01(\r\x12\x0e\n\x06v2xpc5\x18\x1f \x01(\r\x12\x13\n\x0bmultipledrb\x18 \x01(\r\x12\x0e\n\x06length\x18! \x01(\r\"8\n\x1eUeAdditionalSecurityCapability\x12\n\n\x02\x65\x61\x18\x01 \x01(\r\x12\n\n\x02ia\x18\x02 \x01(\r\"\x82\x01\n\tBearerQos\x12\x0b\n\x03pci\x18\x01 \x01(\r\x12\n\n\x02pl\x18\x02 \x01(\r\x12\x0b\n\x03pvi\x18\x03 \x01(\r\x12\x0b\n\x03qci\x18\x04 \x01(\r\x12 \n\x03gbr\x18\x05 \x01(\x0b\x32\x13.magma.lte.oai.Ambr\x12 \n\x03mbr\x18\x06 \x01(\x0b\x32\x13.magma.lte.oai.Ambr\"H\n\x18PcoProtocolOrContainerId\x12\n\n\x02id\x18\x01 \x01(\r\x12\x0e\n\x06length\x18\x02 \x01(\r\x12\x10\n\x08\x63ontents\x18\x03 \x01(\x0c\"\xc1\x01\n\x1cProtocolConfigurationOptions\x12\x0b\n\x03\x65xt\x18\x01 \x01(\r\x12\r\n\x05spare\x18\x02 \x01(\r\x12\x17\n\x0f\x63onfig_protocol\x18\x03 \x01(\r\x12$\n\x1cnum_protocol_or_container_id\x18\x04 \x01(\r\x12\x46\n\x15proto_or_container_id\x18\x05 \x03(\x0b\x32\'.magma.lte.oai.PcoProtocolOrContainerId\"M\n\x0f\x45smEbrTimerData\x12\r\n\x05ue_id\x18\x01 \x01(\r\x12\x0b\n\x03\x65\x62i\x18\x02 \x01(\r\x12\r\n\x05\x63ount\x18\x03 \x01(\r\x12\x0f\n\x07\x65sm_msg\x18\x04 \x01(\x0c\"\xda\x01\n\x0b\x45smProcData\x12\x0b\n\x03pti\x18\x01 \x01(\r\x12\x14\n\x0crequest_type\x18\x02 \x01(\r\x12\x0b\n\x03\x61pn\x18\x03 \x01(\t\x12\x0f\n\x07pdn_cid\x18\x04 \x01(\r\x12\x10\n\x08pdn_type\x18\x05 \x01(\r\x12\x10\n\x08pdn_addr\x18\x06 \x01(\t\x12,\n\nbearer_qos\x18\x07 \x01(\x0b\x32\x18.magma.lte.oai.BearerQos\x12\x38\n\x03pco\x18\x08 \x01(\x0b\x32+.magma.lte.oai.ProtocolConfigurationOptions\"\xe7\x01\n\nEsmContext\x12\x15\n\rn_active_ebrs\x18\x01 \x01(\r\x12\x14\n\x0cis_emergency\x18\x04 \x01(\x08\x12\x31\n\resm_proc_data\x18\x05 \x01(\x0b\x32\x1a.magma.lte.oai.EsmProcData\x12$\n\x05t3489\x18\xc7\x01 \x01(\x0b\x32\x14.magma.lte.oai.Timer\x12\x1a\n\ris_standalone\x18\xc8\x01 \x01(\x08\x42\x02\x18\x01\x12\x1a\n\x11is_pdn_disconnect\x18\xc9\x01 \x01(\x08\x12\x1b\n\x12pending_standalone\x18\xca\x01 
\x01(\x05\"\xab\x02\n\rEsmEbrContext\x12\x0e\n\x06status\x18\x01 \x01(\r\x12\x0e\n\x06gbr_dl\x18\x02 \x01(\x04\x12\x0e\n\x06gbr_ul\x18\x03 \x01(\x04\x12\x0e\n\x06mbr_dl\x18\x04 \x01(\x04\x12\x0e\n\x06mbr_ul\x18\x05 \x01(\x04\x12/\n\x03tft\x18\x06 \x01(\x0b\x32\".magma.lte.oai.TrafficFlowTemplate\x12\x38\n\x03pco\x18\x07 \x01(\x0b\x32+.magma.lte.oai.ProtocolConfigurationOptions\x12#\n\x05timer\x18\x64 \x01(\x0b\x32\x14.magma.lte.oai.Timer\x12:\n\x12\x65sm_ebr_timer_data\x18\x65 \x01(\x0b\x32\x1e.magma.lte.oai.EsmEbrTimerData\"P\n\x0cNwDetachData\x12\r\n\x05ue_id\x18\x01 \x01(\r\x12\x1c\n\x14retransmission_count\x18\x02 \x01(\r\x12\x13\n\x0b\x64\x65tach_type\x18\x03 \x01(\r\"l\n\rNewAttachInfo\x12\x16\n\x0emme_ue_s1ap_id\x18\x01 \x01(\r\x12\x15\n\ris_mm_ctx_new\x18\x02 \x01(\x08\x12,\n\x03ies\x18\x03 \x01(\x0b\x32\x1f.magma.lte.oai.AttachRequestIes\"\xad\n\n\nEmmContext\x12\x0e\n\x06imsi64\x18\x01 \x01(\x04\x12*\n\x04imsi\x18\x02 \x01(\x0b\x32\x1c.magma.lte.oai.IdentityTuple\x12\x14\n\x0csaved_imsi64\x18\x03 \x01(\x04\x12*\n\x04imei\x18\x04 \x01(\x0b\x32\x1c.magma.lte.oai.IdentityTuple\x12,\n\x06imeisv\x18\x05 \x01(\x0b\x32\x1c.magma.lte.oai.IdentityTuple\x12\x11\n\temm_cause\x18\x06 \x01(\r\x12\x15\n\remm_fsm_state\x18\x07 \x01(\r\x12\x13\n\x0b\x61ttach_type\x18\x08 \x01(\r\x12\x34\n\x0e\x65mm_procedures\x18\x0b \x01(\x0b\x32\x1c.magma.lte.oai.EmmProcedures\x12\x18\n\x10\x63ommon_proc_mask\x18\x0c \x01(\r\x12*\n\x07\x65sm_ctx\x18\r \x01(\x0b\x32\x19.magma.lte.oai.EsmContext\x12\x1b\n\x13member_present_mask\x18\x15 \x01(\r\x12\x19\n\x11member_valid_mask\x18\x16 \x01(\r\x12)\n\x06vector\x18\x17 \x03(\x0b\x32\x19.magma.lte.oai.AuthVector\x12\x33\n\x08security\x18\x18 \x01(\x0b\x32!.magma.lte.oai.EmmSecurityContext\x12@\n\x15_non_current_security\x18\x19 \x01(\x0b\x32!.magma.lte.oai.EmmSecurityContext\x12\x12\n\nis_dynamic\x18\x1f \x01(\x08\x12\x13\n\x0bis_attached\x18 \x01(\x08\x12 \n\x18is_initial_identity_imsi\x18! 
\x01(\x08\x12\x1c\n\x14is_guti_based_attach\x18\" \x01(\x08\x12\x13\n\x0bis_guti_set\x18# \x01(\x08\x12\x1b\n\x13is_imsi_only_detach\x18$ \x01(\x08\x12\x14\n\x0cis_emergency\x18% \x01(\x08\x12\x1e\n\x16\x61\x64\x64itional_update_type\x18) \x01(\r\x12\x15\n\rtau_updt_type\x18* \x01(\r\x12\x1a\n\x12num_attach_request\x18+ \x01(\r\x12!\n\x04guti\x18\x33 \x01(\x0b\x32\x13.magma.lte.oai.Guti\x12%\n\x08old_guti\x18\x34 \x01(\x0b\x32\x13.magma.lte.oai.Guti\x12(\n\x08tai_list\x18= \x01(\x0b\x32\x16.magma.lte.oai.TaiList\x12#\n\x07lvr_tai\x18> \x01(\x0b\x32\x12.magma.lte.oai.Tai\x12+\n\x0foriginating_tai\x18? \x01(\x0b\x32\x12.magma.lte.oai.Tai\x12\x0b\n\x03ksi\x18@ \x01(\r\x12\x41\n\x15ue_network_capability\x18\x41 \x01(\x0b\x32\".magma.lte.oai.UeNetworkCapability\x12\x33\n\x0enw_detach_data\x18\x42 \x01(\x0b\x32\x1b.magma.lte.oai.NwDetachData\x12\x35\n\x0fnew_attach_info\x18\x43 \x01(\x0b\x32\x1c.magma.lte.oai.NewAttachInfo\x12X\n!ue_additional_security_capability\x18\x44 \x01(\x0b\x32-.magma.lte.oai.UeAdditionalSecurityCapabilityB\x1fZ\x1dmagma/lte/cloud/go/protos/oaib\x06proto3'
,
dependencies=[lte_dot_protos_dot_oai_dot_common__types__pb2.DESCRIPTOR,lte_dot_protos_dot_oai_dot_spgw__state__pb2.DESCRIPTOR,])
_TIMER = _descriptor.Descriptor(
name='Timer',
full_name='magma.lte.oai.Timer',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='magma.lte.oai.Timer.id', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='sec', full_name='magma.lte.oai.Timer.sec', index=1,
number=2, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=117,
serialized_end=149,
)
_IDENTITYTUPLE = _descriptor.Descriptor(
name='IdentityTuple',
full_name='magma.lte.oai.IdentityTuple',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='magma.lte.oai.IdentityTuple.value', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='num_digits', full_name='magma.lte.oai.IdentityTuple.num_digits', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=151,
serialized_end=201,
)
_PARTIALTAILIST = _descriptor.Descriptor(
name='PartialTaiList',
full_name='magma.lte.oai.PartialTaiList',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='type_of_list', full_name='magma.lte.oai.PartialTaiList.type_of_list', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='number_of_elements', full_name='magma.lte.oai.PartialTaiList.number_of_elements', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='tai_many_plmn', full_name='magma.lte.oai.PartialTaiList.tai_many_plmn', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='tai_one_plmn_consecutive_tacs', full_name='magma.lte.oai.PartialTaiList.tai_one_plmn_consecutive_tacs', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='plmn', full_name='magma.lte.oai.PartialTaiList.plmn', index=4,
number=5, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='tac', full_name='magma.lte.oai.PartialTaiList.tac', index=5,
number=6, type=13, cpp_type=3, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=204,
serialized_end=399,
)
_TAILIST = _descriptor.Descriptor(
name='TaiList',
full_name='magma.lte.oai.TaiList',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='numberoflists', full_name='magma.lte.oai.TaiList.numberoflists', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='partial_tai_lists', full_name='magma.lte.oai.TaiList.partial_tai_lists', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=401,
serialized_end=491,
)
_TAI = _descriptor.Descriptor(
name='Tai',
full_name='magma.lte.oai.Tai',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='mcc_mnc', full_name='magma.lte.oai.Tai.mcc_mnc', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='tac', full_name='magma.lte.oai.Tai.tac', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=493,
serialized_end=528,
)
_NASBASEPROC = _descriptor.Descriptor(
name='NasBaseProc',
full_name='magma.lte.oai.NasBaseProc',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='nas_puid', full_name='magma.lte.oai.NasBaseProc.nas_puid', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='type', full_name='magma.lte.oai.NasBaseProc.type', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=530,
serialized_end=575,
)
_NASEMMPROC = _descriptor.Descriptor(
name='NasEmmProc',
full_name='magma.lte.oai.NasEmmProc',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='base_proc', full_name='magma.lte.oai.NasEmmProc.base_proc', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='type', full_name='magma.lte.oai.NasEmmProc.type', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='previous_emm_fsm_state', full_name='magma.lte.oai.NasEmmProc.previous_emm_fsm_state', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=577,
serialized_end=682,
)
_AUTHINFOPROC = _descriptor.Descriptor(
name='AuthInfoProc',
full_name='magma.lte.oai.AuthInfoProc',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='request_sent', full_name='magma.lte.oai.AuthInfoProc.request_sent', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='vector', full_name='magma.lte.oai.AuthInfoProc.vector', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='nas_cause', full_name='magma.lte.oai.AuthInfoProc.nas_cause', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='ue_id', full_name='magma.lte.oai.AuthInfoProc.ue_id', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='resync', full_name='magma.lte.oai.AuthInfoProc.resync', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='timer_s6a', full_name='magma.lte.oai.AuthInfoProc.timer_s6a', index=5,
number=200, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=685,
serialized_end=856,
)
_NASCNPROC = _descriptor.Descriptor(
name='NasCnProc',
full_name='magma.lte.oai.NasCnProc',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='base_proc', full_name='magma.lte.oai.NasCnProc.base_proc', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='auth_info_proc', full_name='magma.lte.oai.NasCnProc.auth_info_proc', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='MessageTypes', full_name='magma.lte.oai.NasCnProc.MessageTypes',
index=0, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
],
serialized_start=859,
serialized_end=988,
)
_MOBILESTACLASSMARK2 = _descriptor.Descriptor(
name='MobileStaClassmark2',
full_name='magma.lte.oai.MobileStaClassmark2',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=990,
serialized_end=1011,
)
_VOICEPREFERENCE = _descriptor.Descriptor(
name='VoicePreference',
full_name='magma.lte.oai.VoicePreference',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1013,
serialized_end=1030,
)
# Auto-generated protobuf Descriptor for the NasMsgDecodeStatus message
# (magma.lte.oai.NasMsgDecodeStatus) -- produced by protoc; do not edit by hand.
# Numeric field `type` codes follow FieldDescriptorProto.Type:
# 8=bool, 11=message, 12=bytes, 13=uint32.
_NASMSGDECODESTATUS = _descriptor.Descriptor(
  name='NasMsgDecodeStatus',
  full_name='magma.lte.oai.NasMsgDecodeStatus',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    # integrity_protected_message: uint32, field number 1
    _descriptor.FieldDescriptor(
      name='integrity_protected_message', full_name='magma.lte.oai.NasMsgDecodeStatus.integrity_protected_message', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # ciphered_message: uint32, field number 2
    _descriptor.FieldDescriptor(
      name='ciphered_message', full_name='magma.lte.oai.NasMsgDecodeStatus.ciphered_message', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # mac_matched: uint32, field number 3
    _descriptor.FieldDescriptor(
      name='mac_matched', full_name='magma.lte.oai.NasMsgDecodeStatus.mac_matched', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # security_context_available: uint32, field number 4
    _descriptor.FieldDescriptor(
      name='security_context_available', full_name='magma.lte.oai.NasMsgDecodeStatus.security_context_available', index=3,
      number=4, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # emm_cause: uint32, field number 5
    _descriptor.FieldDescriptor(
      name='emm_cause', full_name='magma.lte.oai.NasMsgDecodeStatus.emm_cause', index=4,
      number=5, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message's definition within the serialized
  # FileDescriptorProto -- computed by protoc, must not be changed by hand.
  serialized_start=1033,
  serialized_end=1192,
)
# Auto-generated protobuf Descriptor for the DrxParameter message
# (magma.lte.oai.DrxParameter) -- produced by protoc; do not edit by hand.
# All four fields are uint32 (type=13).
_DRXPARAMETER = _descriptor.Descriptor(
  name='DrxParameter',
  full_name='magma.lte.oai.DrxParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='splitpgcyclecode', full_name='magma.lte.oai.DrxParameter.splitpgcyclecode', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='cndrx', full_name='magma.lte.oai.DrxParameter.cndrx', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='splitonccch', full_name='magma.lte.oai.DrxParameter.splitonccch', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='nondrxtimer', full_name='magma.lte.oai.DrxParameter.nondrxtimer', index=3,
      number=4, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Offsets into the serialized FileDescriptorProto (computed by protoc).
  serialized_start=1194,
  serialized_end=1291,
)
# Auto-generated protobuf Descriptor for the AttachRequestIes message
# (magma.lte.oai.AttachRequestIes) -- produced by protoc; do not edit by hand.
# Numeric field `type` codes follow FieldDescriptorProto.Type:
# 8=bool, 11=message (the message_type links are resolved later by the
# generated setup code), 12=bytes, 13=uint32.
_ATTACHREQUESTIES = _descriptor.Descriptor(
  name='AttachRequestIes',
  full_name='magma.lte.oai.AttachRequestIes',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    # is_initial: bool, field number 1
    _descriptor.FieldDescriptor(
      name='is_initial', full_name='magma.lte.oai.AttachRequestIes.is_initial', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # type: uint32, field number 2
    _descriptor.FieldDescriptor(
      name='type', full_name='magma.lte.oai.AttachRequestIes.type', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # additional_update_type: uint32, field number 3
    _descriptor.FieldDescriptor(
      name='additional_update_type', full_name='magma.lte.oai.AttachRequestIes.additional_update_type', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # is_native_sc: bool, field number 4
    _descriptor.FieldDescriptor(
      name='is_native_sc', full_name='magma.lte.oai.AttachRequestIes.is_native_sc', index=3,
      number=4, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # ksi: uint32, field number 5
    _descriptor.FieldDescriptor(
      name='ksi', full_name='magma.lte.oai.AttachRequestIes.ksi', index=4,
      number=5, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # is_native_guti: bool, field number 6
    _descriptor.FieldDescriptor(
      name='is_native_guti', full_name='magma.lte.oai.AttachRequestIes.is_native_guti', index=5,
      number=6, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # guti: sub-message, field number 7
    _descriptor.FieldDescriptor(
      name='guti', full_name='magma.lte.oai.AttachRequestIes.guti', index=6,
      number=7, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # imsi: sub-message, field number 8
    _descriptor.FieldDescriptor(
      name='imsi', full_name='magma.lte.oai.AttachRequestIes.imsi', index=7,
      number=8, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # imei: sub-message, field number 9
    _descriptor.FieldDescriptor(
      name='imei', full_name='magma.lte.oai.AttachRequestIes.imei', index=8,
      number=9, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # last_visited_tai: sub-message, field number 10
    _descriptor.FieldDescriptor(
      name='last_visited_tai', full_name='magma.lte.oai.AttachRequestIes.last_visited_tai', index=9,
      number=10, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # origin_tai: sub-message, field number 11
    _descriptor.FieldDescriptor(
      name='origin_tai', full_name='magma.lte.oai.AttachRequestIes.origin_tai', index=10,
      number=11, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # origin_ecgi: sub-message, field number 12
    _descriptor.FieldDescriptor(
      name='origin_ecgi', full_name='magma.lte.oai.AttachRequestIes.origin_ecgi', index=11,
      number=12, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # ue_nw_capability: sub-message, field number 13
    _descriptor.FieldDescriptor(
      name='ue_nw_capability', full_name='magma.lte.oai.AttachRequestIes.ue_nw_capability', index=12,
      number=13, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # drx_parameter: sub-message, field number 14
    _descriptor.FieldDescriptor(
      name='drx_parameter', full_name='magma.lte.oai.AttachRequestIes.drx_parameter', index=13,
      number=14, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # esm_msg: bytes, field number 15
    _descriptor.FieldDescriptor(
      name='esm_msg', full_name='magma.lte.oai.AttachRequestIes.esm_msg', index=14,
      number=15, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=b"",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # decode_status: sub-message, field number 16
    _descriptor.FieldDescriptor(
      name='decode_status', full_name='magma.lte.oai.AttachRequestIes.decode_status', index=15,
      number=16, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # classmark2: sub-message, field number 17
    _descriptor.FieldDescriptor(
      name='classmark2', full_name='magma.lte.oai.AttachRequestIes.classmark2', index=16,
      number=17, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # voice_preference: sub-message, field number 18
    _descriptor.FieldDescriptor(
      name='voice_preference', full_name='magma.lte.oai.AttachRequestIes.voice_preference', index=17,
      number=18, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # ue_additional_security_capability: sub-message, field number 19
    _descriptor.FieldDescriptor(
      name='ue_additional_security_capability', full_name='magma.lte.oai.AttachRequestIes.ue_additional_security_capability', index=18,
      number=19, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Offsets into the serialized FileDescriptorProto (computed by protoc).
  serialized_start=1294,
  serialized_end=2081,
)
# Auto-generated protobuf Descriptor for the AttachProc message
# (magma.lte.oai.AttachProc) -- produced by protoc; do not edit by hand.
# Numeric field `type` codes follow FieldDescriptorProto.Type:
# 8=bool, 11=message, 12=bytes, 13=uint32.
_ATTACHPROC = _descriptor.Descriptor(
  name='AttachProc',
  full_name='magma.lte.oai.AttachProc',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    # attach_accept_sent: uint32, field number 1
    _descriptor.FieldDescriptor(
      name='attach_accept_sent', full_name='magma.lte.oai.AttachProc.attach_accept_sent', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # attach_reject_sent: bool, field number 2
    _descriptor.FieldDescriptor(
      name='attach_reject_sent', full_name='magma.lte.oai.AttachProc.attach_reject_sent', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # attach_complete_received: bool, field number 3
    _descriptor.FieldDescriptor(
      name='attach_complete_received', full_name='magma.lte.oai.AttachProc.attach_complete_received', index=2,
      number=3, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # guti: sub-message, field number 4
    _descriptor.FieldDescriptor(
      name='guti', full_name='magma.lte.oai.AttachProc.guti', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # esm_msg_out: bytes, field number 5
    _descriptor.FieldDescriptor(
      name='esm_msg_out', full_name='magma.lte.oai.AttachProc.esm_msg_out', index=4,
      number=5, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=b"",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # ies: sub-message, field number 6
    _descriptor.FieldDescriptor(
      name='ies', full_name='magma.lte.oai.AttachProc.ies', index=5,
      number=6, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # ue_id: uint32, field number 7
    _descriptor.FieldDescriptor(
      name='ue_id', full_name='magma.lte.oai.AttachProc.ue_id', index=6,
      number=7, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # ksi: uint32, field number 8
    _descriptor.FieldDescriptor(
      name='ksi', full_name='magma.lte.oai.AttachProc.ksi', index=7,
      number=8, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # emm_cause: uint32, field number 9
    _descriptor.FieldDescriptor(
      name='emm_cause', full_name='magma.lte.oai.AttachProc.emm_cause', index=8,
      number=9, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # t3450: sub-message, field number 200 (note the deliberately high field
    # number in the .proto -- preserved exactly as generated)
    _descriptor.FieldDescriptor(
      name='t3450', full_name='magma.lte.oai.AttachProc.t3450', index=9,
      number=200, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Offsets into the serialized FileDescriptorProto (computed by protoc).
  serialized_start=2084,
  serialized_end=2373,
)
# Auto-generated protobuf Descriptor for the DetachRequestIes message
# (magma.lte.oai.DetachRequestIes) -- produced by protoc; do not edit by hand.
# Numeric field `type` codes follow FieldDescriptorProto.Type:
# 8=bool, 11=message, 13=uint32.
_DETACHREQUESTIES = _descriptor.Descriptor(
  name='DetachRequestIes',
  full_name='magma.lte.oai.DetachRequestIes',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    # type: uint32, field number 1
    _descriptor.FieldDescriptor(
      name='type', full_name='magma.lte.oai.DetachRequestIes.type', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # switch_off: bool, field number 2
    _descriptor.FieldDescriptor(
      name='switch_off', full_name='magma.lte.oai.DetachRequestIes.switch_off', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # is_native_sc: bool, field number 3
    _descriptor.FieldDescriptor(
      name='is_native_sc', full_name='magma.lte.oai.DetachRequestIes.is_native_sc', index=2,
      number=3, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # ksi: uint32, field number 4
    _descriptor.FieldDescriptor(
      name='ksi', full_name='magma.lte.oai.DetachRequestIes.ksi', index=3,
      number=4, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # guti: sub-message, field number 5
    _descriptor.FieldDescriptor(
      name='guti', full_name='magma.lte.oai.DetachRequestIes.guti', index=4,
      number=5, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # imsi: sub-message, field number 6
    _descriptor.FieldDescriptor(
      name='imsi', full_name='magma.lte.oai.DetachRequestIes.imsi', index=5,
      number=6, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # imei: sub-message, field number 7
    _descriptor.FieldDescriptor(
      name='imei', full_name='magma.lte.oai.DetachRequestIes.imei', index=6,
      number=7, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # decode_status: sub-message, field number 8
    _descriptor.FieldDescriptor(
      name='decode_status', full_name='magma.lte.oai.DetachRequestIes.decode_status', index=7,
      number=8, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Offsets into the serialized FileDescriptorProto (computed by protoc).
  serialized_start=2376,
  serialized_end=2644,
)
# Auto-generated protobuf Descriptor for the TauRequestIes message
# (magma.lte.oai.TauRequestIes) -- produced by protoc; do not edit by hand.
# The message is currently empty (no fields defined in the .proto).
_TAUREQUESTIES = _descriptor.Descriptor(
  name='TauRequestIes',
  full_name='magma.lte.oai.TauRequestIes',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Offsets into the serialized FileDescriptorProto (computed by protoc).
  serialized_start=2646,
  serialized_end=2661,
)
# Auto-generated protobuf Descriptor for the NasTauProc message
# (magma.lte.oai.NasTauProc) -- produced by protoc; do not edit by hand.
# The message is currently empty (no fields defined in the .proto).
_NASTAUPROC = _descriptor.Descriptor(
  name='NasTauProc',
  full_name='magma.lte.oai.NasTauProc',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Offsets into the serialized FileDescriptorProto (computed by protoc).
  serialized_start=2663,
  serialized_end=2675,
)
# Auto-generated protobuf Descriptor for the AuthProc message
# (magma.lte.oai.AuthProc) -- produced by protoc; do not edit by hand.
# Numeric field `type` codes follow FieldDescriptorProto.Type:
# 8=bool, 11=message, 12=bytes, 13=uint32.
_AUTHPROC = _descriptor.Descriptor(
  name='AuthProc',
  full_name='magma.lte.oai.AuthProc',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    # retransmission_count: uint32, field number 1
    _descriptor.FieldDescriptor(
      name='retransmission_count', full_name='magma.lte.oai.AuthProc.retransmission_count', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # sync_fail_count: uint32, field number 2
    _descriptor.FieldDescriptor(
      name='sync_fail_count', full_name='magma.lte.oai.AuthProc.sync_fail_count', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # mac_fail_count: uint32, field number 3
    _descriptor.FieldDescriptor(
      name='mac_fail_count', full_name='magma.lte.oai.AuthProc.mac_fail_count', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # ue_id: uint32, field number 4
    _descriptor.FieldDescriptor(
      name='ue_id', full_name='magma.lte.oai.AuthProc.ue_id', index=3,
      number=4, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # is_cause_is_attach: bool, field number 5
    _descriptor.FieldDescriptor(
      name='is_cause_is_attach', full_name='magma.lte.oai.AuthProc.is_cause_is_attach', index=4,
      number=5, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # ksi: uint32, field number 6
    _descriptor.FieldDescriptor(
      name='ksi', full_name='magma.lte.oai.AuthProc.ksi', index=5,
      number=6, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # rand: bytes, field number 7
    _descriptor.FieldDescriptor(
      name='rand', full_name='magma.lte.oai.AuthProc.rand', index=6,
      number=7, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=b"",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # autn: bytes, field number 8
    _descriptor.FieldDescriptor(
      name='autn', full_name='magma.lte.oai.AuthProc.autn', index=7,
      number=8, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=b"",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # unchecked_imsi: sub-message, field number 9
    _descriptor.FieldDescriptor(
      name='unchecked_imsi', full_name='magma.lte.oai.AuthProc.unchecked_imsi', index=8,
      number=9, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # emm_cause: uint32, field number 10
    _descriptor.FieldDescriptor(
      name='emm_cause', full_name='magma.lte.oai.AuthProc.emm_cause', index=9,
      number=10, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # T3460: sub-message, field number 200 (high field number comes from the
    # .proto definition; preserved exactly as generated)
    _descriptor.FieldDescriptor(
      name='T3460', full_name='magma.lte.oai.AuthProc.T3460', index=10,
      number=200, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Offsets into the serialized FileDescriptorProto (computed by protoc).
  serialized_start=2678,
  serialized_end=2962,
)
# Auto-generated protobuf Descriptor for the SmcProc message
# (magma.lte.oai.SmcProc) -- produced by protoc; do not edit by hand.
# Numeric field `type` codes follow FieldDescriptorProto.Type:
# 8=bool, 13=uint32.
_SMCPROC = _descriptor.Descriptor(
  name='SmcProc',
  full_name='magma.lte.oai.SmcProc',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    # ue_id: uint32, field number 1
    _descriptor.FieldDescriptor(
      name='ue_id', full_name='magma.lte.oai.SmcProc.ue_id', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # retransmission_count: uint32, field number 2
    _descriptor.FieldDescriptor(
      name='retransmission_count', full_name='magma.lte.oai.SmcProc.retransmission_count', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # ksi: uint32, field number 3
    _descriptor.FieldDescriptor(
      name='ksi', full_name='magma.lte.oai.SmcProc.ksi', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # eea: uint32, field number 4
    _descriptor.FieldDescriptor(
      name='eea', full_name='magma.lte.oai.SmcProc.eea', index=3,
      number=4, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # eia: uint32, field number 5
    _descriptor.FieldDescriptor(
      name='eia', full_name='magma.lte.oai.SmcProc.eia', index=4,
      number=5, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # ucs2: uint32, field number 6
    _descriptor.FieldDescriptor(
      name='ucs2', full_name='magma.lte.oai.SmcProc.ucs2', index=5,
      number=6, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # uea: uint32, field number 7
    _descriptor.FieldDescriptor(
      name='uea', full_name='magma.lte.oai.SmcProc.uea', index=6,
      number=7, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # uia: uint32, field number 8
    _descriptor.FieldDescriptor(
      name='uia', full_name='magma.lte.oai.SmcProc.uia', index=7,
      number=8, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # gea: uint32, field number 9
    _descriptor.FieldDescriptor(
      name='gea', full_name='magma.lte.oai.SmcProc.gea', index=8,
      number=9, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # umts_present: bool, field number 10
    _descriptor.FieldDescriptor(
      name='umts_present', full_name='magma.lte.oai.SmcProc.umts_present', index=9,
      number=10, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # gprs_present: bool, field number 11
    _descriptor.FieldDescriptor(
      name='gprs_present', full_name='magma.lte.oai.SmcProc.gprs_present', index=10,
      number=11, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # selected_eea: uint32, field number 12
    _descriptor.FieldDescriptor(
      name='selected_eea', full_name='magma.lte.oai.SmcProc.selected_eea', index=11,
      number=12, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # selected_eia: uint32, field number 13
    _descriptor.FieldDescriptor(
      name='selected_eia', full_name='magma.lte.oai.SmcProc.selected_eia', index=12,
      number=13, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # saved_selected_eea: uint32, field number 14
    _descriptor.FieldDescriptor(
      name='saved_selected_eea', full_name='magma.lte.oai.SmcProc.saved_selected_eea', index=13,
      number=14, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # saved_selected_eia: uint32, field number 15
    _descriptor.FieldDescriptor(
      name='saved_selected_eia', full_name='magma.lte.oai.SmcProc.saved_selected_eia', index=14,
      number=15, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # saved_eksi: uint32, field number 16
    _descriptor.FieldDescriptor(
      name='saved_eksi', full_name='magma.lte.oai.SmcProc.saved_eksi', index=15,
      number=16, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # saved_overflow: uint32, field number 17
    _descriptor.FieldDescriptor(
      name='saved_overflow', full_name='magma.lte.oai.SmcProc.saved_overflow', index=16,
      number=17, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # saved_seq_num: uint32, field number 18
    _descriptor.FieldDescriptor(
      name='saved_seq_num', full_name='magma.lte.oai.SmcProc.saved_seq_num', index=17,
      number=18, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # saved_sc_type: uint32, field number 19
    _descriptor.FieldDescriptor(
      name='saved_sc_type', full_name='magma.lte.oai.SmcProc.saved_sc_type', index=18,
      number=19, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # notify_failure: bool, field number 20
    _descriptor.FieldDescriptor(
      name='notify_failure', full_name='magma.lte.oai.SmcProc.notify_failure', index=19,
      number=20, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # is_new: bool, field number 21
    _descriptor.FieldDescriptor(
      name='is_new', full_name='magma.lte.oai.SmcProc.is_new', index=20,
      number=21, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    # imeisv_request: bool, field number 22
    _descriptor.FieldDescriptor(
      name='imeisv_request', full_name='magma.lte.oai.SmcProc.imeisv_request', index=21,
      number=22, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Offsets into the serialized FileDescriptorProto (computed by protoc).
  serialized_start=2965,
  serialized_end=3409,
)
# Descriptor for the magma.lte.oai.NasEmmProcWithType message.
# Holds one of several NAS EMM procedure sub-messages; declares a
# 'MessageTypes' oneof (its field membership is wired up later in this
# generated module, which is why fields=[] is empty on the OneofDescriptor).
# NOTE: auto-generated by protoc -- regenerate from the .proto instead of
# editing by hand; serialized_start/end are byte offsets into the file's
# serialized descriptor blob and must not be changed independently.
_NASEMMPROCWITHTYPE = _descriptor.Descriptor(
  name='NasEmmProcWithType',
  full_name='magma.lte.oai.NasEmmProcWithType',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    # All fields are singular (label=1) message-typed (type=11) fields;
    # message_type=None is resolved later by the descriptor pool.
    _descriptor.FieldDescriptor(
      name='emm_proc', full_name='magma.lte.oai.NasEmmProcWithType.emm_proc', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='attach_proc', full_name='magma.lte.oai.NasEmmProcWithType.attach_proc', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='detach_proc', full_name='magma.lte.oai.NasEmmProcWithType.detach_proc', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='smc_proc', full_name='magma.lte.oai.NasEmmProcWithType.smc_proc', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='auth_proc', full_name='magma.lte.oai.NasEmmProcWithType.auth_proc', index=4,
      number=5, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    _descriptor.OneofDescriptor(
      name='MessageTypes', full_name='magma.lte.oai.NasEmmProcWithType.MessageTypes',
      index=0, containing_type=None,
      create_key=_descriptor._internal_create_key,
      fields=[]),
  ],
  serialized_start=3412,
  serialized_end=3689,
)
# Descriptor for the magma.lte.oai.NasProcMessSign message: a NAS procedure
# message signature (uint64 puid, bytes digest, plus digest/NAS message
# lengths as uint64).  Auto-generated by protoc -- do not edit by hand.
_NASPROCMESSSIGN = _descriptor.Descriptor(
  name='NasProcMessSign',
  full_name='magma.lte.oai.NasProcMessSign',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    # type=4 is TYPE_UINT64, type=12 is TYPE_BYTES (google.protobuf
    # FieldDescriptor type codes); label=1 means singular/optional.
    _descriptor.FieldDescriptor(
      name='puid', full_name='magma.lte.oai.NasProcMessSign.puid', index=0,
      number=1, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='digest', full_name='magma.lte.oai.NasProcMessSign.digest', index=1,
      number=2, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=b"",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='digest_length', full_name='magma.lte.oai.NasProcMessSign.digest_length', index=2,
      number=3, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='nas_msg_length', full_name='magma.lte.oai.NasProcMessSign.nas_msg_length', index=3,
      number=4, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3691,
  serialized_end=3785,
)
# Descriptor for the magma.lte.oai.EmmProcedures message: the set of EMM
# procedures attached to a UE context (specific/common/CN/connection-mgmt
# procedures and NAS message signatures).  Auto-generated by protoc.
_EMMPROCEDURES = _descriptor.Descriptor(
  name='EmmProcedures',
  full_name='magma.lte.oai.EmmProcedures',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    # type=11 is TYPE_MESSAGE; label=3 marks a repeated field
    # (emm_common_proc, cn_proc, nas_proc_mess_sign), label=1 singular.
    _descriptor.FieldDescriptor(
      name='emm_specific_proc', full_name='magma.lte.oai.EmmProcedures.emm_specific_proc', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='emm_common_proc', full_name='magma.lte.oai.EmmProcedures.emm_common_proc', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='cn_proc', full_name='magma.lte.oai.EmmProcedures.cn_proc', index=2,
      number=3, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='emm_con_mngt_proc', full_name='magma.lte.oai.EmmProcedures.emm_con_mngt_proc', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='nas_proc_mess_sign_next_location', full_name='magma.lte.oai.EmmProcedures.nas_proc_mess_sign_next_location', index=4,
      number=5, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='nas_proc_mess_sign', full_name='magma.lte.oai.EmmProcedures.nas_proc_mess_sign', index=5,
      number=6, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3788,
  serialized_end=4132,
)
# Descriptor for the magma.lte.oai.EmmCommonData message: a single uint64
# 'pointer' field carried on (unusually high) field number 200.
# Auto-generated by protoc -- do not edit by hand.
_EMMCOMMONDATA = _descriptor.Descriptor(
  name='EmmCommonData',
  full_name='magma.lte.oai.EmmCommonData',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='pointer', full_name='magma.lte.oai.EmmCommonData.pointer', index=0,
      number=200, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4134,
  serialized_end=4167,
)
# Descriptor for the nested message EmmSecurityContext.Count: a NAS COUNT
# value split into spare / overflow / seq_num uint32 parts.  Listed in
# _EMMSECURITYCONTEXT.nested_types below.  Auto-generated by protoc.
_EMMSECURITYCONTEXT_COUNT = _descriptor.Descriptor(
  name='Count',
  full_name='magma.lte.oai.EmmSecurityContext.Count',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    # type=13 is TYPE_UINT32 (google.protobuf FieldDescriptor type code).
    _descriptor.FieldDescriptor(
      name='spare', full_name='magma.lte.oai.EmmSecurityContext.Count.spare', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='overflow', full_name='magma.lte.oai.EmmSecurityContext.Count.overflow', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='seq_num', full_name='magma.lte.oai.EmmSecurityContext.Count.seq_num', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4729,
  serialized_end=4786,
)
# Descriptor for the nested message EmmSecurityContext.Capability: UE
# security capability bitmasks for EPS/UMTS/GPRS plus presence flags.
# NOTE: the 'integirty' misspelling comes from the source .proto; it is
# part of the generated API and must not be "corrected" here.
# Auto-generated by protoc -- do not edit by hand.
_EMMSECURITYCONTEXT_CAPABILITY = _descriptor.Descriptor(
  name='Capability',
  full_name='magma.lte.oai.EmmSecurityContext.Capability',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    # type=13 -> uint32 capability masks; type=8 -> bool presence flags.
    _descriptor.FieldDescriptor(
      name='eps_encryption', full_name='magma.lte.oai.EmmSecurityContext.Capability.eps_encryption', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='eps_integirty', full_name='magma.lte.oai.EmmSecurityContext.Capability.eps_integirty', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='umts_encryption', full_name='magma.lte.oai.EmmSecurityContext.Capability.umts_encryption', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='umts_integirty', full_name='magma.lte.oai.EmmSecurityContext.Capability.umts_integirty', index=3,
      number=4, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='gprs_encryption', full_name='magma.lte.oai.EmmSecurityContext.Capability.gprs_encryption', index=4,
      number=5, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='umts_present', full_name='magma.lte.oai.EmmSecurityContext.Capability.umts_present', index=5,
      number=6, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='gprs_present', full_name='magma.lte.oai.EmmSecurityContext.Capability.gprs_present', index=6,
      number=7, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4789,
  serialized_end=4966,
)
# Descriptor for the nested message EmmSecurityContext.SelectedAlgorithms:
# the negotiated encryption and integrity algorithm identifiers (uint32).
# Auto-generated by protoc -- do not edit by hand.
_EMMSECURITYCONTEXT_SELECTEDALGORITHMS = _descriptor.Descriptor(
  name='SelectedAlgorithms',
  full_name='magma.lte.oai.EmmSecurityContext.SelectedAlgorithms',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='encryption', full_name='magma.lte.oai.EmmSecurityContext.SelectedAlgorithms.encryption', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='integrity', full_name='magma.lte.oai.EmmSecurityContext.SelectedAlgorithms.integrity', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4968,
  serialized_end=5027,
)
# Descriptor for the magma.lte.oai.EmmSecurityContext message: the EPS
# security context for a UE -- key set identifier, NAS keys (bytes),
# UL/DL counts, UE capability, selected algorithms, and next-hop keying
# material.  Its three nested message types (Count, Capability,
# SelectedAlgorithms) are declared above and listed in nested_types.
# Auto-generated by protoc -- do not edit by hand.
_EMMSECURITYCONTEXT = _descriptor.Descriptor(
  name='EmmSecurityContext',
  full_name='magma.lte.oai.EmmSecurityContext',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    # type codes: 13=uint32, 12=bytes, 11=message; all fields singular.
    _descriptor.FieldDescriptor(
      name='sc_type', full_name='magma.lte.oai.EmmSecurityContext.sc_type', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='eksi', full_name='magma.lte.oai.EmmSecurityContext.eksi', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='vector_index', full_name='magma.lte.oai.EmmSecurityContext.vector_index', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='knas_enc', full_name='magma.lte.oai.EmmSecurityContext.knas_enc', index=3,
      number=4, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=b"",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='knas_int', full_name='magma.lte.oai.EmmSecurityContext.knas_int', index=4,
      number=5, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=b"",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='dl_count', full_name='magma.lte.oai.EmmSecurityContext.dl_count', index=5,
      number=6, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='ul_count', full_name='magma.lte.oai.EmmSecurityContext.ul_count', index=6,
      number=7, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='kenb_ul_count', full_name='magma.lte.oai.EmmSecurityContext.kenb_ul_count', index=7,
      number=8, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='capability', full_name='magma.lte.oai.EmmSecurityContext.capability', index=8,
      number=9, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='selected_algos', full_name='magma.lte.oai.EmmSecurityContext.selected_algos', index=9,
      number=10, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='activated', full_name='magma.lte.oai.EmmSecurityContext.activated', index=10,
      number=11, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='direction_encode', full_name='magma.lte.oai.EmmSecurityContext.direction_encode', index=11,
      number=12, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='direction_decode', full_name='magma.lte.oai.EmmSecurityContext.direction_decode', index=12,
      number=13, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='next_hop', full_name='magma.lte.oai.EmmSecurityContext.next_hop', index=13,
      number=14, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=b"",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='next_hop_chaining_count', full_name='magma.lte.oai.EmmSecurityContext.next_hop_chaining_count', index=14,
      number=15, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[_EMMSECURITYCONTEXT_COUNT, _EMMSECURITYCONTEXT_CAPABILITY, _EMMSECURITYCONTEXT_SELECTEDALGORITHMS, ],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4170,
  serialized_end=5027,
)
# Descriptor for the magma.lte.oai.AuthVector message: an EPS authentication
# vector -- kasme, rand, autn, and xres, all carried as bytes fields
# (type=12).  Auto-generated by protoc -- do not edit by hand.
_AUTHVECTOR = _descriptor.Descriptor(
  name='AuthVector',
  full_name='magma.lte.oai.AuthVector',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='kasme', full_name='magma.lte.oai.AuthVector.kasme', index=0,
      number=1, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=b"",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='rand', full_name='magma.lte.oai.AuthVector.rand', index=1,
      number=2, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=b"",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='autn', full_name='magma.lte.oai.AuthVector.autn', index=2,
      number=3, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=b"",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='xres', full_name='magma.lte.oai.AuthVector.xres', index=3,
      number=4, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=b"",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5029,
  serialized_end=5098,
)
_UENETWORKCAPABILITY = _descriptor.Descriptor(
name='UeNetworkCapability',
full_name='magma.lte.oai.UeNetworkCapability',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='eea', full_name='magma.lte.oai.UeNetworkCapability.eea', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='eia', full_name='magma.lte.oai.UeNetworkCapability.eia', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='uea', full_name='magma.lte.oai.UeNetworkCapability.uea', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='ucs2', full_name='magma.lte.oai.UeNetworkCapability.ucs2', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='uia', full_name='magma.lte.oai.UeNetworkCapability.uia', index=4,
number=5, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='spare', full_name='magma.lte.oai.UeNetworkCapability.spare', index=5,
number=6, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\030\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='csfb', full_name='magma.lte.oai.UeNetworkCapability.csfb', index=6,
number=7, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='lpp', full_name='magma.lte.oai.UeNetworkCapability.lpp', index=7,
number=8, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='lcs', full_name='magma.lte.oai.UeNetworkCapability.lcs', index=8,
number=9, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='srvcc', full_name='magma.lte.oai.UeNetworkCapability.srvcc', index=9,
number=10, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='nf', full_name='magma.lte.oai.UeNetworkCapability.nf', index=10,
number=11, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='prosedd', full_name='magma.lte.oai.UeNetworkCapability.prosedd', index=11,
number=12, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='prose', full_name='magma.lte.oai.UeNetworkCapability.prose', index=12,
number=13, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='h245ash', full_name='magma.lte.oai.UeNetworkCapability.h245ash', index=13,
number=14, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='epco', full_name='magma.lte.oai.UeNetworkCapability.epco', index=14,
number=15, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='hccpciot', full_name='magma.lte.oai.UeNetworkCapability.hccpciot', index=15,
number=16, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='erwfopdn', full_name='magma.lte.oai.UeNetworkCapability.erwfopdn', index=16,
number=17, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='s1udata', full_name='magma.lte.oai.UeNetworkCapability.s1udata', index=17,
number=18, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='upciot', full_name='magma.lte.oai.UeNetworkCapability.upciot', index=18,
number=19, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='umts_present', full_name='magma.lte.oai.UeNetworkCapability.umts_present', index=19,
number=20, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='misc_present', full_name='magma.lte.oai.UeNetworkCapability.misc_present', index=20,
number=21, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\030\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='cpciot', full_name='magma.lte.oai.UeNetworkCapability.cpciot', index=21,
number=22, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='proserelay', full_name='magma.lte.oai.UeNetworkCapability.proserelay', index=22,
number=23, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='prosedc', full_name='magma.lte.oai.UeNetworkCapability.prosedc', index=23,
number=24, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='bearer', full_name='magma.lte.oai.UeNetworkCapability.bearer', index=24,
number=25, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='sgc', full_name='magma.lte.oai.UeNetworkCapability.sgc', index=25,
number=26, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='n1mod', full_name='magma.lte.oai.UeNetworkCapability.n1mod', index=26,
number=27, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='dcnr', full_name='magma.lte.oai.UeNetworkCapability.dcnr', index=27,
number=28, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='cpbackoff', full_name='magma.lte.oai.UeNetworkCapability.cpbackoff', index=28,
number=29, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='restrictec', full_name='magma.lte.oai.UeNetworkCapability.restrictec', index=29,
number=30, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='v2xpc5', full_name='magma.lte.oai.UeNetworkCapability.v2xpc5', index=30,
number=31, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='multipledrb', full_name='magma.lte.oai.UeNetworkCapability.multipledrb', index=31,
number=32, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='length', full_name='magma.lte.oai.UeNetworkCapability.length', index=32,
number=33, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5101,
serialized_end=5657,
)
# Generated protobuf Descriptor for message `UeAdditionalSecurityCapability`
# (`magma.lte.oai.UeAdditionalSecurityCapability`). This is protoc output:
# field numbers, type codes, and the serialized_start/serialized_end offsets
# into the file's serialized FileDescriptorProto must not be edited by hand.
# Both fields are scalar uint32 (type=13, cpp_type=3) singular (label=1).
_UEADDITIONALSECURITYCAPABILITY = _descriptor.Descriptor(
  name='UeAdditionalSecurityCapability',
  full_name='magma.lte.oai.UeAdditionalSecurityCapability',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='ea', full_name='magma.lte.oai.UeAdditionalSecurityCapability.ea', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='ia', full_name='magma.lte.oai.UeAdditionalSecurityCapability.ia', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5659,
  serialized_end=5715,
)
# Generated protobuf Descriptor for message `BearerQos`
# (`magma.lte.oai.BearerQos`). Protoc output — do not edit by hand.
# Fields 1-4 (pci, pl, pvi, qci) are scalar uint32 (type=13); fields 5-6
# (gbr, mbr) are singular sub-messages (type=11, cpp_type=10) whose
# message_type is linked in later by the generated cross-reference code.
_BEARERQOS = _descriptor.Descriptor(
  name='BearerQos',
  full_name='magma.lte.oai.BearerQos',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='pci', full_name='magma.lte.oai.BearerQos.pci', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='pl', full_name='magma.lte.oai.BearerQos.pl', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='pvi', full_name='magma.lte.oai.BearerQos.pvi', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='qci', full_name='magma.lte.oai.BearerQos.qci', index=3,
      number=4, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='gbr', full_name='magma.lte.oai.BearerQos.gbr', index=4,
      number=5, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='mbr', full_name='magma.lte.oai.BearerQos.mbr', index=5,
      number=6, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5718,
  serialized_end=5848,
)
# Generated protobuf Descriptor for message `PcoProtocolOrContainerId`
# (`magma.lte.oai.PcoProtocolOrContainerId`). Protoc output — do not edit.
# `id` and `length` are uint32 (type=13); `contents` is bytes (type=12,
# cpp_type=9) with the proto3 default of b"".
_PCOPROTOCOLORCONTAINERID = _descriptor.Descriptor(
  name='PcoProtocolOrContainerId',
  full_name='magma.lte.oai.PcoProtocolOrContainerId',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='id', full_name='magma.lte.oai.PcoProtocolOrContainerId.id', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='length', full_name='magma.lte.oai.PcoProtocolOrContainerId.length', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='contents', full_name='magma.lte.oai.PcoProtocolOrContainerId.contents', index=2,
      number=3, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=b"",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5850,
  serialized_end=5922,
)
# Generated protobuf Descriptor for message `ProtocolConfigurationOptions`
# (`magma.lte.oai.ProtocolConfigurationOptions`). Protoc output — do not edit.
# Fields 1-4 are scalar uint32; `proto_or_container_id` (field 5) is a
# repeated sub-message (type=11, label=3), hence default_value=[].
_PROTOCOLCONFIGURATIONOPTIONS = _descriptor.Descriptor(
  name='ProtocolConfigurationOptions',
  full_name='magma.lte.oai.ProtocolConfigurationOptions',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='ext', full_name='magma.lte.oai.ProtocolConfigurationOptions.ext', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='spare', full_name='magma.lte.oai.ProtocolConfigurationOptions.spare', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='config_protocol', full_name='magma.lte.oai.ProtocolConfigurationOptions.config_protocol', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='num_protocol_or_container_id', full_name='magma.lte.oai.ProtocolConfigurationOptions.num_protocol_or_container_id', index=3,
      number=4, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='proto_or_container_id', full_name='magma.lte.oai.ProtocolConfigurationOptions.proto_or_container_id', index=4,
      number=5, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5925,
  serialized_end=6118,
)
# Generated protobuf Descriptor for message `EsmEbrTimerData`
# (`magma.lte.oai.EsmEbrTimerData`). Protoc output — do not edit.
# `ue_id`, `ebi`, `count` are uint32; `esm_msg` is a bytes field (type=12).
_ESMEBRTIMERDATA = _descriptor.Descriptor(
  name='EsmEbrTimerData',
  full_name='magma.lte.oai.EsmEbrTimerData',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='ue_id', full_name='magma.lte.oai.EsmEbrTimerData.ue_id', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='ebi', full_name='magma.lte.oai.EsmEbrTimerData.ebi', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='count', full_name='magma.lte.oai.EsmEbrTimerData.count', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='esm_msg', full_name='magma.lte.oai.EsmEbrTimerData.esm_msg', index=3,
      number=4, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=b"",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6120,
  serialized_end=6197,
)
# Generated protobuf Descriptor for message `EsmProcData`
# (`magma.lte.oai.EsmProcData`). Protoc output — do not edit.
# `apn` and `pdn_addr` are proto3 strings (type=9); the
# b"".decode('utf-8') default is protoc's way of spelling an empty unicode
# string that is safe on both Python 2 and 3. `bearer_qos` and `pco` are
# singular sub-messages (type=11).
_ESMPROCDATA = _descriptor.Descriptor(
  name='EsmProcData',
  full_name='magma.lte.oai.EsmProcData',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='pti', full_name='magma.lte.oai.EsmProcData.pti', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='request_type', full_name='magma.lte.oai.EsmProcData.request_type', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='apn', full_name='magma.lte.oai.EsmProcData.apn', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='pdn_cid', full_name='magma.lte.oai.EsmProcData.pdn_cid', index=3,
      number=4, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='pdn_type', full_name='magma.lte.oai.EsmProcData.pdn_type', index=4,
      number=5, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='pdn_addr', full_name='magma.lte.oai.EsmProcData.pdn_addr', index=5,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='bearer_qos', full_name='magma.lte.oai.EsmProcData.bearer_qos', index=6,
      number=7, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='pco', full_name='magma.lte.oai.EsmProcData.pco', index=7,
      number=8, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6200,
  serialized_end=6418,
)
# Generated protobuf Descriptor for message `EsmContext`
# (`magma.lte.oai.EsmContext`). Protoc output — do not edit.
# Field numbers are sparse (1, 4, 5, 199-202): numbers 2-3 and 6-198 do not
# appear here, presumably removed/reserved in the source .proto — confirm
# against the .proto file before reusing them.
_ESMCONTEXT = _descriptor.Descriptor(
  name='EsmContext',
  full_name='magma.lte.oai.EsmContext',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='n_active_ebrs', full_name='magma.lte.oai.EsmContext.n_active_ebrs', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='is_emergency', full_name='magma.lte.oai.EsmContext.is_emergency', index=1,
      number=4, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='esm_proc_data', full_name='magma.lte.oai.EsmContext.esm_proc_data', index=2,
      number=5, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='t3489', full_name='magma.lte.oai.EsmContext.t3489', index=3,
      number=199, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # serialized_options=b'\030\001' is the wire-encoded FieldOptions
    # `deprecated = true` (FieldOptions field 3, varint 1): `is_standalone`
    # is marked deprecated in the source .proto.
    _descriptor.FieldDescriptor(
      name='is_standalone', full_name='magma.lte.oai.EsmContext.is_standalone', index=4,
      number=200, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=b'\030\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='is_pdn_disconnect', full_name='magma.lte.oai.EsmContext.is_pdn_disconnect', index=5,
      number=201, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='pending_standalone', full_name='magma.lte.oai.EsmContext.pending_standalone', index=6,
      number=202, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6421,
  serialized_end=6652,
)
# Generated protobuf Descriptor for message `EsmEbrContext`
# (`magma.lte.oai.EsmEbrContext`). Protoc output — do not edit.
# gbr_dl/gbr_ul/mbr_dl/mbr_ul are uint64 (type=4, cpp_type=4); tft, pco,
# timer, and esm_ebr_timer_data are singular sub-messages (type=11).
_ESMEBRCONTEXT = _descriptor.Descriptor(
  name='EsmEbrContext',
  full_name='magma.lte.oai.EsmEbrContext',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='status', full_name='magma.lte.oai.EsmEbrContext.status', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='gbr_dl', full_name='magma.lte.oai.EsmEbrContext.gbr_dl', index=1,
      number=2, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='gbr_ul', full_name='magma.lte.oai.EsmEbrContext.gbr_ul', index=2,
      number=3, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='mbr_dl', full_name='magma.lte.oai.EsmEbrContext.mbr_dl', index=3,
      number=4, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='mbr_ul', full_name='magma.lte.oai.EsmEbrContext.mbr_ul', index=4,
      number=5, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='tft', full_name='magma.lte.oai.EsmEbrContext.tft', index=5,
      number=6, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='pco', full_name='magma.lte.oai.EsmEbrContext.pco', index=6,
      number=7, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='timer', full_name='magma.lte.oai.EsmEbrContext.timer', index=7,
      number=100, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='esm_ebr_timer_data', full_name='magma.lte.oai.EsmEbrContext.esm_ebr_timer_data', index=8,
      number=101, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6655,
  serialized_end=6954,
)
# Generated protobuf Descriptor for message `NwDetachData`
# (`magma.lte.oai.NwDetachData`). Protoc output — do not edit.
# All three fields are singular uint32 scalars.
_NWDETACHDATA = _descriptor.Descriptor(
  name='NwDetachData',
  full_name='magma.lte.oai.NwDetachData',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='ue_id', full_name='magma.lte.oai.NwDetachData.ue_id', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='retransmission_count', full_name='magma.lte.oai.NwDetachData.retransmission_count', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='detach_type', full_name='magma.lte.oai.NwDetachData.detach_type', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6956,
  serialized_end=7036,
)
# Generated protobuf Descriptor for message `NewAttachInfo`
# (`magma.lte.oai.NewAttachInfo`). Protoc output — do not edit.
# `mme_ue_s1ap_id` is uint32, `is_mm_ctx_new` is bool (type=8), and `ies`
# is a singular sub-message (type=11).
_NEWATTACHINFO = _descriptor.Descriptor(
  name='NewAttachInfo',
  full_name='magma.lte.oai.NewAttachInfo',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='mme_ue_s1ap_id', full_name='magma.lte.oai.NewAttachInfo.mme_ue_s1ap_id', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='is_mm_ctx_new', full_name='magma.lte.oai.NewAttachInfo.is_mm_ctx_new', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='ies', full_name='magma.lte.oai.NewAttachInfo.ies', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=7038,
  serialized_end=7146,
)
_EMMCONTEXT = _descriptor.Descriptor(
name='EmmContext',
full_name='magma.lte.oai.EmmContext',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='imsi64', full_name='magma.lte.oai.EmmContext.imsi64', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='imsi', full_name='magma.lte.oai.EmmContext.imsi', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='saved_imsi64', full_name='magma.lte.oai.EmmContext.saved_imsi64', index=2,
number=3, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='imei', full_name='magma.lte.oai.EmmContext.imei', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='imeisv', full_name='magma.lte.oai.EmmContext.imeisv', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='emm_cause', full_name='magma.lte.oai.EmmContext.emm_cause', index=5,
number=6, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='emm_fsm_state', full_name='magma.lte.oai.EmmContext.emm_fsm_state', index=6,
number=7, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='attach_type', full_name='magma.lte.oai.EmmContext.attach_type', index=7,
number=8, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='emm_procedures', full_name='magma.lte.oai.EmmContext.emm_procedures', index=8,
number=11, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='common_proc_mask', full_name='magma.lte.oai.EmmContext.common_proc_mask', index=9,
number=12, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='esm_ctx', full_name='magma.lte.oai.EmmContext.esm_ctx', index=10,
number=13, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='member_present_mask', full_name='magma.lte.oai.EmmContext.member_present_mask', index=11,
number=21, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='member_valid_mask', full_name='magma.lte.oai.EmmContext.member_valid_mask', index=12,
number=22, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='vector', full_name='magma.lte.oai.EmmContext.vector', index=13,
number=23, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='security', full_name='magma.lte.oai.EmmContext.security', index=14,
number=24, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='_non_current_security', full_name='magma.lte.oai.EmmContext._non_current_security', index=15,
number=25, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='is_dynamic', full_name='magma.lte.oai.EmmContext.is_dynamic', index=16,
number=31, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='is_attached', full_name='magma.lte.oai.EmmContext.is_attached', index=17,
number=32, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='is_initial_identity_imsi', full_name='magma.lte.oai.EmmContext.is_initial_identity_imsi', index=18,
number=33, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='is_guti_based_attach', full_name='magma.lte.oai.EmmContext.is_guti_based_attach', index=19,
number=34, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='is_guti_set', full_name='magma.lte.oai.EmmContext.is_guti_set', index=20,
number=35, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='is_imsi_only_detach', full_name='magma.lte.oai.EmmContext.is_imsi_only_detach', index=21,
number=36, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='is_emergency', full_name='magma.lte.oai.EmmContext.is_emergency', index=22,
number=37, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='additional_update_type', full_name='magma.lte.oai.EmmContext.additional_update_type', index=23,
number=41, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='tau_updt_type', full_name='magma.lte.oai.EmmContext.tau_updt_type', index=24,
number=42, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='num_attach_request', full_name='magma.lte.oai.EmmContext.num_attach_request', index=25,
number=43, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='guti', full_name='magma.lte.oai.EmmContext.guti', index=26,
number=51, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='old_guti', full_name='magma.lte.oai.EmmContext.old_guti', index=27,
number=52, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='tai_list', full_name='magma.lte.oai.EmmContext.tai_list', index=28,
number=61, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='lvr_tai', full_name='magma.lte.oai.EmmContext.lvr_tai', index=29,
number=62, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='originating_tai', full_name='magma.lte.oai.EmmContext.originating_tai', index=30,
number=63, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='ksi', full_name='magma.lte.oai.EmmContext.ksi', index=31,
number=64, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='ue_network_capability', full_name='magma.lte.oai.EmmContext.ue_network_capability', index=32,
number=65, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='nw_detach_data', full_name='magma.lte.oai.EmmContext.nw_detach_data', index=33,
number=66, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='new_attach_info', full_name='magma.lte.oai.EmmContext.new_attach_info', index=34,
number=67, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='ue_additional_security_capability', full_name='magma.lte.oai.EmmContext.ue_additional_security_capability', index=35,
number=68, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7149,
serialized_end=8474,
)
_PARTIALTAILIST.fields_by_name['tai_many_plmn'].message_type = _TAI
_PARTIALTAILIST.fields_by_name['tai_one_plmn_consecutive_tacs'].message_type = _TAI
_TAILIST.fields_by_name['partial_tai_lists'].message_type = _PARTIALTAILIST
_NASEMMPROC.fields_by_name['base_proc'].message_type = _NASBASEPROC
_AUTHINFOPROC.fields_by_name['vector'].message_type = _AUTHVECTOR
_AUTHINFOPROC.fields_by_name['timer_s6a'].message_type = _TIMER
_NASCNPROC.fields_by_name['base_proc'].message_type = _NASBASEPROC
_NASCNPROC.fields_by_name['auth_info_proc'].message_type = _AUTHINFOPROC
_NASCNPROC.oneofs_by_name['MessageTypes'].fields.append(
_NASCNPROC.fields_by_name['auth_info_proc'])
_NASCNPROC.fields_by_name['auth_info_proc'].containing_oneof = _NASCNPROC.oneofs_by_name['MessageTypes']
_ATTACHREQUESTIES.fields_by_name['guti'].message_type = lte_dot_protos_dot_oai_dot_common__types__pb2._GUTI
_ATTACHREQUESTIES.fields_by_name['imsi'].message_type = _IDENTITYTUPLE
_ATTACHREQUESTIES.fields_by_name['imei'].message_type = _IDENTITYTUPLE
_ATTACHREQUESTIES.fields_by_name['last_visited_tai'].message_type = _TAI
_ATTACHREQUESTIES.fields_by_name['origin_tai'].message_type = _TAI
_ATTACHREQUESTIES.fields_by_name['origin_ecgi'].message_type = lte_dot_protos_dot_oai_dot_common__types__pb2._ECGI
_ATTACHREQUESTIES.fields_by_name['ue_nw_capability'].message_type = _UENETWORKCAPABILITY
_ATTACHREQUESTIES.fields_by_name['drx_parameter'].message_type = _DRXPARAMETER
_ATTACHREQUESTIES.fields_by_name['decode_status'].message_type = _NASMSGDECODESTATUS
_ATTACHREQUESTIES.fields_by_name['classmark2'].message_type = _MOBILESTACLASSMARK2
_ATTACHREQUESTIES.fields_by_name['voice_preference'].message_type = _VOICEPREFERENCE
_ATTACHREQUESTIES.fields_by_name['ue_additional_security_capability'].message_type = _UEADDITIONALSECURITYCAPABILITY
_ATTACHPROC.fields_by_name['guti'].message_type = lte_dot_protos_dot_oai_dot_common__types__pb2._GUTI
_ATTACHPROC.fields_by_name['ies'].message_type = _ATTACHREQUESTIES
_ATTACHPROC.fields_by_name['t3450'].message_type = _TIMER
_DETACHREQUESTIES.fields_by_name['guti'].message_type = lte_dot_protos_dot_oai_dot_common__types__pb2._GUTI
_DETACHREQUESTIES.fields_by_name['imsi'].message_type = _IDENTITYTUPLE
_DETACHREQUESTIES.fields_by_name['imei'].message_type = _IDENTITYTUPLE
_DETACHREQUESTIES.fields_by_name['decode_status'].message_type = _NASMSGDECODESTATUS
_AUTHPROC.fields_by_name['unchecked_imsi'].message_type = _IDENTITYTUPLE
_AUTHPROC.fields_by_name['T3460'].message_type = _TIMER
_NASEMMPROCWITHTYPE.fields_by_name['emm_proc'].message_type = _NASEMMPROC
_NASEMMPROCWITHTYPE.fields_by_name['attach_proc'].message_type = _ATTACHPROC
_NASEMMPROCWITHTYPE.fields_by_name['detach_proc'].message_type = _DETACHREQUESTIES
_NASEMMPROCWITHTYPE.fields_by_name['smc_proc'].message_type = _SMCPROC
_NASEMMPROCWITHTYPE.fields_by_name['auth_proc'].message_type = _AUTHPROC
_NASEMMPROCWITHTYPE.oneofs_by_name['MessageTypes'].fields.append(
_NASEMMPROCWITHTYPE.fields_by_name['attach_proc'])
_NASEMMPROCWITHTYPE.fields_by_name['attach_proc'].containing_oneof = _NASEMMPROCWITHTYPE.oneofs_by_name['MessageTypes']
_NASEMMPROCWITHTYPE.oneofs_by_name['MessageTypes'].fields.append(
_NASEMMPROCWITHTYPE.fields_by_name['detach_proc'])
_NASEMMPROCWITHTYPE.fields_by_name['detach_proc'].containing_oneof = _NASEMMPROCWITHTYPE.oneofs_by_name['MessageTypes']
_NASEMMPROCWITHTYPE.oneofs_by_name['MessageTypes'].fields.append(
_NASEMMPROCWITHTYPE.fields_by_name['smc_proc'])
_NASEMMPROCWITHTYPE.fields_by_name['smc_proc'].containing_oneof = _NASEMMPROCWITHTYPE.oneofs_by_name['MessageTypes']
_NASEMMPROCWITHTYPE.oneofs_by_name['MessageTypes'].fields.append(
_NASEMMPROCWITHTYPE.fields_by_name['auth_proc'])
_NASEMMPROCWITHTYPE.fields_by_name['auth_proc'].containing_oneof = _NASEMMPROCWITHTYPE.oneofs_by_name['MessageTypes']
_EMMPROCEDURES.fields_by_name['emm_specific_proc'].message_type = _NASEMMPROCWITHTYPE
_EMMPROCEDURES.fields_by_name['emm_common_proc'].message_type = _NASEMMPROCWITHTYPE
_EMMPROCEDURES.fields_by_name['cn_proc'].message_type = _NASCNPROC
_EMMPROCEDURES.fields_by_name['emm_con_mngt_proc'].message_type = _NASEMMPROCWITHTYPE
_EMMPROCEDURES.fields_by_name['nas_proc_mess_sign'].message_type = _NASPROCMESSSIGN
_EMMSECURITYCONTEXT_COUNT.containing_type = _EMMSECURITYCONTEXT
_EMMSECURITYCONTEXT_CAPABILITY.containing_type = _EMMSECURITYCONTEXT
_EMMSECURITYCONTEXT_SELECTEDALGORITHMS.containing_type = _EMMSECURITYCONTEXT
_EMMSECURITYCONTEXT.fields_by_name['dl_count'].message_type = _EMMSECURITYCONTEXT_COUNT
_EMMSECURITYCONTEXT.fields_by_name['ul_count'].message_type = _EMMSECURITYCONTEXT_COUNT
_EMMSECURITYCONTEXT.fields_by_name['kenb_ul_count'].message_type = _EMMSECURITYCONTEXT_COUNT
_EMMSECURITYCONTEXT.fields_by_name['capability'].message_type = _EMMSECURITYCONTEXT_CAPABILITY
_EMMSECURITYCONTEXT.fields_by_name['selected_algos'].message_type = _EMMSECURITYCONTEXT_SELECTEDALGORITHMS
_BEARERQOS.fields_by_name['gbr'].message_type = lte_dot_protos_dot_oai_dot_common__types__pb2._AMBR
_BEARERQOS.fields_by_name['mbr'].message_type = lte_dot_protos_dot_oai_dot_common__types__pb2._AMBR
_PROTOCOLCONFIGURATIONOPTIONS.fields_by_name['proto_or_container_id'].message_type = _PCOPROTOCOLORCONTAINERID
_ESMPROCDATA.fields_by_name['bearer_qos'].message_type = _BEARERQOS
_ESMPROCDATA.fields_by_name['pco'].message_type = _PROTOCOLCONFIGURATIONOPTIONS
_ESMCONTEXT.fields_by_name['esm_proc_data'].message_type = _ESMPROCDATA
_ESMCONTEXT.fields_by_name['t3489'].message_type = _TIMER
_ESMEBRCONTEXT.fields_by_name['tft'].message_type = lte_dot_protos_dot_oai_dot_spgw__state__pb2._TRAFFICFLOWTEMPLATE
_ESMEBRCONTEXT.fields_by_name['pco'].message_type = _PROTOCOLCONFIGURATIONOPTIONS
_ESMEBRCONTEXT.fields_by_name['timer'].message_type = _TIMER
_ESMEBRCONTEXT.fields_by_name['esm_ebr_timer_data'].message_type = _ESMEBRTIMERDATA
_NEWATTACHINFO.fields_by_name['ies'].message_type = _ATTACHREQUESTIES
_EMMCONTEXT.fields_by_name['imsi'].message_type = _IDENTITYTUPLE
_EMMCONTEXT.fields_by_name['imei'].message_type = _IDENTITYTUPLE
_EMMCONTEXT.fields_by_name['imeisv'].message_type = _IDENTITYTUPLE
_EMMCONTEXT.fields_by_name['emm_procedures'].message_type = _EMMPROCEDURES
_EMMCONTEXT.fields_by_name['esm_ctx'].message_type = _ESMCONTEXT
_EMMCONTEXT.fields_by_name['vector'].message_type = _AUTHVECTOR
_EMMCONTEXT.fields_by_name['security'].message_type = _EMMSECURITYCONTEXT
_EMMCONTEXT.fields_by_name['_non_current_security'].message_type = _EMMSECURITYCONTEXT
_EMMCONTEXT.fields_by_name['guti'].message_type = lte_dot_protos_dot_oai_dot_common__types__pb2._GUTI
_EMMCONTEXT.fields_by_name['old_guti'].message_type = lte_dot_protos_dot_oai_dot_common__types__pb2._GUTI
_EMMCONTEXT.fields_by_name['tai_list'].message_type = _TAILIST
_EMMCONTEXT.fields_by_name['lvr_tai'].message_type = _TAI
_EMMCONTEXT.fields_by_name['originating_tai'].message_type = _TAI
_EMMCONTEXT.fields_by_name['ue_network_capability'].message_type = _UENETWORKCAPABILITY
_EMMCONTEXT.fields_by_name['nw_detach_data'].message_type = _NWDETACHDATA
_EMMCONTEXT.fields_by_name['new_attach_info'].message_type = _NEWATTACHINFO
_EMMCONTEXT.fields_by_name['ue_additional_security_capability'].message_type = _UEADDITIONALSECURITYCAPABILITY
DESCRIPTOR.message_types_by_name['Timer'] = _TIMER
DESCRIPTOR.message_types_by_name['IdentityTuple'] = _IDENTITYTUPLE
DESCRIPTOR.message_types_by_name['PartialTaiList'] = _PARTIALTAILIST
DESCRIPTOR.message_types_by_name['TaiList'] = _TAILIST
DESCRIPTOR.message_types_by_name['Tai'] = _TAI
DESCRIPTOR.message_types_by_name['NasBaseProc'] = _NASBASEPROC
DESCRIPTOR.message_types_by_name['NasEmmProc'] = _NASEMMPROC
DESCRIPTOR.message_types_by_name['AuthInfoProc'] = _AUTHINFOPROC
DESCRIPTOR.message_types_by_name['NasCnProc'] = _NASCNPROC
DESCRIPTOR.message_types_by_name['MobileStaClassmark2'] = _MOBILESTACLASSMARK2
DESCRIPTOR.message_types_by_name['VoicePreference'] = _VOICEPREFERENCE
DESCRIPTOR.message_types_by_name['NasMsgDecodeStatus'] = _NASMSGDECODESTATUS
DESCRIPTOR.message_types_by_name['DrxParameter'] = _DRXPARAMETER
DESCRIPTOR.message_types_by_name['AttachRequestIes'] = _ATTACHREQUESTIES
DESCRIPTOR.message_types_by_name['AttachProc'] = _ATTACHPROC
DESCRIPTOR.message_types_by_name['DetachRequestIes'] = _DETACHREQUESTIES
DESCRIPTOR.message_types_by_name['TauRequestIes'] = _TAUREQUESTIES
DESCRIPTOR.message_types_by_name['NasTauProc'] = _NASTAUPROC
DESCRIPTOR.message_types_by_name['AuthProc'] = _AUTHPROC
DESCRIPTOR.message_types_by_name['SmcProc'] = _SMCPROC
DESCRIPTOR.message_types_by_name['NasEmmProcWithType'] = _NASEMMPROCWITHTYPE
DESCRIPTOR.message_types_by_name['NasProcMessSign'] = _NASPROCMESSSIGN
DESCRIPTOR.message_types_by_name['EmmProcedures'] = _EMMPROCEDURES
DESCRIPTOR.message_types_by_name['EmmCommonData'] = _EMMCOMMONDATA
DESCRIPTOR.message_types_by_name['EmmSecurityContext'] = _EMMSECURITYCONTEXT
DESCRIPTOR.message_types_by_name['AuthVector'] = _AUTHVECTOR
DESCRIPTOR.message_types_by_name['UeNetworkCapability'] = _UENETWORKCAPABILITY
DESCRIPTOR.message_types_by_name['UeAdditionalSecurityCapability'] = _UEADDITIONALSECURITYCAPABILITY
DESCRIPTOR.message_types_by_name['BearerQos'] = _BEARERQOS
DESCRIPTOR.message_types_by_name['PcoProtocolOrContainerId'] = _PCOPROTOCOLORCONTAINERID
DESCRIPTOR.message_types_by_name['ProtocolConfigurationOptions'] = _PROTOCOLCONFIGURATIONOPTIONS
DESCRIPTOR.message_types_by_name['EsmEbrTimerData'] = _ESMEBRTIMERDATA
DESCRIPTOR.message_types_by_name['EsmProcData'] = _ESMPROCDATA
DESCRIPTOR.message_types_by_name['EsmContext'] = _ESMCONTEXT
DESCRIPTOR.message_types_by_name['EsmEbrContext'] = _ESMEBRCONTEXT
DESCRIPTOR.message_types_by_name['NwDetachData'] = _NWDETACHDATA
DESCRIPTOR.message_types_by_name['NewAttachInfo'] = _NEWATTACHINFO
DESCRIPTOR.message_types_by_name['EmmContext'] = _EMMCONTEXT
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Timer = _reflection.GeneratedProtocolMessageType('Timer', (_message.Message,), {
'DESCRIPTOR' : _TIMER,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.Timer)
})
_sym_db.RegisterMessage(Timer)
IdentityTuple = _reflection.GeneratedProtocolMessageType('IdentityTuple', (_message.Message,), {
'DESCRIPTOR' : _IDENTITYTUPLE,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.IdentityTuple)
})
_sym_db.RegisterMessage(IdentityTuple)
PartialTaiList = _reflection.GeneratedProtocolMessageType('PartialTaiList', (_message.Message,), {
'DESCRIPTOR' : _PARTIALTAILIST,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.PartialTaiList)
})
_sym_db.RegisterMessage(PartialTaiList)
TaiList = _reflection.GeneratedProtocolMessageType('TaiList', (_message.Message,), {
'DESCRIPTOR' : _TAILIST,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.TaiList)
})
_sym_db.RegisterMessage(TaiList)
Tai = _reflection.GeneratedProtocolMessageType('Tai', (_message.Message,), {
'DESCRIPTOR' : _TAI,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.Tai)
})
_sym_db.RegisterMessage(Tai)
NasBaseProc = _reflection.GeneratedProtocolMessageType('NasBaseProc', (_message.Message,), {
'DESCRIPTOR' : _NASBASEPROC,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.NasBaseProc)
})
_sym_db.RegisterMessage(NasBaseProc)
NasEmmProc = _reflection.GeneratedProtocolMessageType('NasEmmProc', (_message.Message,), {
'DESCRIPTOR' : _NASEMMPROC,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.NasEmmProc)
})
_sym_db.RegisterMessage(NasEmmProc)
AuthInfoProc = _reflection.GeneratedProtocolMessageType('AuthInfoProc', (_message.Message,), {
'DESCRIPTOR' : _AUTHINFOPROC,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.AuthInfoProc)
})
_sym_db.RegisterMessage(AuthInfoProc)
NasCnProc = _reflection.GeneratedProtocolMessageType('NasCnProc', (_message.Message,), {
'DESCRIPTOR' : _NASCNPROC,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.NasCnProc)
})
_sym_db.RegisterMessage(NasCnProc)
MobileStaClassmark2 = _reflection.GeneratedProtocolMessageType('MobileStaClassmark2', (_message.Message,), {
'DESCRIPTOR' : _MOBILESTACLASSMARK2,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.MobileStaClassmark2)
})
_sym_db.RegisterMessage(MobileStaClassmark2)
VoicePreference = _reflection.GeneratedProtocolMessageType('VoicePreference', (_message.Message,), {
'DESCRIPTOR' : _VOICEPREFERENCE,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.VoicePreference)
})
_sym_db.RegisterMessage(VoicePreference)
NasMsgDecodeStatus = _reflection.GeneratedProtocolMessageType('NasMsgDecodeStatus', (_message.Message,), {
'DESCRIPTOR' : _NASMSGDECODESTATUS,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.NasMsgDecodeStatus)
})
_sym_db.RegisterMessage(NasMsgDecodeStatus)
DrxParameter = _reflection.GeneratedProtocolMessageType('DrxParameter', (_message.Message,), {
'DESCRIPTOR' : _DRXPARAMETER,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.DrxParameter)
})
_sym_db.RegisterMessage(DrxParameter)
AttachRequestIes = _reflection.GeneratedProtocolMessageType('AttachRequestIes', (_message.Message,), {
'DESCRIPTOR' : _ATTACHREQUESTIES,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.AttachRequestIes)
})
_sym_db.RegisterMessage(AttachRequestIes)
AttachProc = _reflection.GeneratedProtocolMessageType('AttachProc', (_message.Message,), {
'DESCRIPTOR' : _ATTACHPROC,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.AttachProc)
})
_sym_db.RegisterMessage(AttachProc)
DetachRequestIes = _reflection.GeneratedProtocolMessageType('DetachRequestIes', (_message.Message,), {
'DESCRIPTOR' : _DETACHREQUESTIES,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.DetachRequestIes)
})
_sym_db.RegisterMessage(DetachRequestIes)
TauRequestIes = _reflection.GeneratedProtocolMessageType('TauRequestIes', (_message.Message,), {
'DESCRIPTOR' : _TAUREQUESTIES,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.TauRequestIes)
})
_sym_db.RegisterMessage(TauRequestIes)
NasTauProc = _reflection.GeneratedProtocolMessageType('NasTauProc', (_message.Message,), {
'DESCRIPTOR' : _NASTAUPROC,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.NasTauProc)
})
_sym_db.RegisterMessage(NasTauProc)
AuthProc = _reflection.GeneratedProtocolMessageType('AuthProc', (_message.Message,), {
'DESCRIPTOR' : _AUTHPROC,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.AuthProc)
})
_sym_db.RegisterMessage(AuthProc)
SmcProc = _reflection.GeneratedProtocolMessageType('SmcProc', (_message.Message,), {
'DESCRIPTOR' : _SMCPROC,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.SmcProc)
})
_sym_db.RegisterMessage(SmcProc)
NasEmmProcWithType = _reflection.GeneratedProtocolMessageType('NasEmmProcWithType', (_message.Message,), {
'DESCRIPTOR' : _NASEMMPROCWITHTYPE,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.NasEmmProcWithType)
})
_sym_db.RegisterMessage(NasEmmProcWithType)
NasProcMessSign = _reflection.GeneratedProtocolMessageType('NasProcMessSign', (_message.Message,), {
'DESCRIPTOR' : _NASPROCMESSSIGN,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.NasProcMessSign)
})
_sym_db.RegisterMessage(NasProcMessSign)
EmmProcedures = _reflection.GeneratedProtocolMessageType('EmmProcedures', (_message.Message,), {
'DESCRIPTOR' : _EMMPROCEDURES,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.EmmProcedures)
})
_sym_db.RegisterMessage(EmmProcedures)
EmmCommonData = _reflection.GeneratedProtocolMessageType('EmmCommonData', (_message.Message,), {
'DESCRIPTOR' : _EMMCOMMONDATA,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.EmmCommonData)
})
_sym_db.RegisterMessage(EmmCommonData)
EmmSecurityContext = _reflection.GeneratedProtocolMessageType('EmmSecurityContext', (_message.Message,), {
'Count' : _reflection.GeneratedProtocolMessageType('Count', (_message.Message,), {
'DESCRIPTOR' : _EMMSECURITYCONTEXT_COUNT,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.EmmSecurityContext.Count)
})
,
'Capability' : _reflection.GeneratedProtocolMessageType('Capability', (_message.Message,), {
'DESCRIPTOR' : _EMMSECURITYCONTEXT_CAPABILITY,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.EmmSecurityContext.Capability)
})
,
'SelectedAlgorithms' : _reflection.GeneratedProtocolMessageType('SelectedAlgorithms', (_message.Message,), {
'DESCRIPTOR' : _EMMSECURITYCONTEXT_SELECTEDALGORITHMS,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.EmmSecurityContext.SelectedAlgorithms)
})
,
'DESCRIPTOR' : _EMMSECURITYCONTEXT,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.EmmSecurityContext)
})
_sym_db.RegisterMessage(EmmSecurityContext)
_sym_db.RegisterMessage(EmmSecurityContext.Count)
_sym_db.RegisterMessage(EmmSecurityContext.Capability)
_sym_db.RegisterMessage(EmmSecurityContext.SelectedAlgorithms)
AuthVector = _reflection.GeneratedProtocolMessageType('AuthVector', (_message.Message,), {
'DESCRIPTOR' : _AUTHVECTOR,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.AuthVector)
})
_sym_db.RegisterMessage(AuthVector)
UeNetworkCapability = _reflection.GeneratedProtocolMessageType('UeNetworkCapability', (_message.Message,), {
'DESCRIPTOR' : _UENETWORKCAPABILITY,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.UeNetworkCapability)
})
_sym_db.RegisterMessage(UeNetworkCapability)
UeAdditionalSecurityCapability = _reflection.GeneratedProtocolMessageType('UeAdditionalSecurityCapability', (_message.Message,), {
'DESCRIPTOR' : _UEADDITIONALSECURITYCAPABILITY,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.UeAdditionalSecurityCapability)
})
_sym_db.RegisterMessage(UeAdditionalSecurityCapability)
BearerQos = _reflection.GeneratedProtocolMessageType('BearerQos', (_message.Message,), {
'DESCRIPTOR' : _BEARERQOS,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.BearerQos)
})
_sym_db.RegisterMessage(BearerQos)
PcoProtocolOrContainerId = _reflection.GeneratedProtocolMessageType('PcoProtocolOrContainerId', (_message.Message,), {
'DESCRIPTOR' : _PCOPROTOCOLORCONTAINERID,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.PcoProtocolOrContainerId)
})
_sym_db.RegisterMessage(PcoProtocolOrContainerId)
ProtocolConfigurationOptions = _reflection.GeneratedProtocolMessageType('ProtocolConfigurationOptions', (_message.Message,), {
'DESCRIPTOR' : _PROTOCOLCONFIGURATIONOPTIONS,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.ProtocolConfigurationOptions)
})
_sym_db.RegisterMessage(ProtocolConfigurationOptions)
EsmEbrTimerData = _reflection.GeneratedProtocolMessageType('EsmEbrTimerData', (_message.Message,), {
'DESCRIPTOR' : _ESMEBRTIMERDATA,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.EsmEbrTimerData)
})
_sym_db.RegisterMessage(EsmEbrTimerData)
EsmProcData = _reflection.GeneratedProtocolMessageType('EsmProcData', (_message.Message,), {
'DESCRIPTOR' : _ESMPROCDATA,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.EsmProcData)
})
_sym_db.RegisterMessage(EsmProcData)
EsmContext = _reflection.GeneratedProtocolMessageType('EsmContext', (_message.Message,), {
'DESCRIPTOR' : _ESMCONTEXT,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.EsmContext)
})
_sym_db.RegisterMessage(EsmContext)
EsmEbrContext = _reflection.GeneratedProtocolMessageType('EsmEbrContext', (_message.Message,), {
'DESCRIPTOR' : _ESMEBRCONTEXT,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.EsmEbrContext)
})
_sym_db.RegisterMessage(EsmEbrContext)
NwDetachData = _reflection.GeneratedProtocolMessageType('NwDetachData', (_message.Message,), {
'DESCRIPTOR' : _NWDETACHDATA,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.NwDetachData)
})
_sym_db.RegisterMessage(NwDetachData)
NewAttachInfo = _reflection.GeneratedProtocolMessageType('NewAttachInfo', (_message.Message,), {
'DESCRIPTOR' : _NEWATTACHINFO,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.NewAttachInfo)
})
_sym_db.RegisterMessage(NewAttachInfo)
EmmContext = _reflection.GeneratedProtocolMessageType('EmmContext', (_message.Message,), {
'DESCRIPTOR' : _EMMCONTEXT,
'__module__' : 'lte.protos.oai.nas_state_pb2'
# @@protoc_insertion_point(class_scope:magma.lte.oai.EmmContext)
})
_sym_db.RegisterMessage(EmmContext)
DESCRIPTOR._options = None
_UENETWORKCAPABILITY.fields_by_name['spare']._options = None
_UENETWORKCAPABILITY.fields_by_name['misc_present']._options = None
_ESMCONTEXT.fields_by_name['is_standalone']._options = None
# @@protoc_insertion_point(module_scope)
| 50.739169
| 14,905
| 0.757926
| 22,972
| 172,158
| 5.355694
| 0.035565
| 0.055856
| 0.085092
| 0.069348
| 0.83114
| 0.802798
| 0.748096
| 0.706732
| 0.68421
| 0.675301
| 0
| 0.039916
| 0.120465
| 172,158
| 3,392
| 14,906
| 50.754127
| 0.772604
| 0.017397
| 0
| 0.735303
| 1
| 0.001866
| 0.185912
| 0.150587
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.001866
| 0
| 0.001866
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e2bab82e2322ece32b98dad470a49c691a2eb339
| 39,497
|
py
|
Python
|
src/strategies.py
|
paulobh/tradingsystem
|
8ba721e4f0f058a5cf8ad0b0db3f5486008787e0
|
[
"RSA-MD"
] | 3
|
2021-02-20T07:20:44.000Z
|
2022-02-15T21:52:56.000Z
|
src/strategies.py
|
paulobh/tradingsystem
|
8ba721e4f0f058a5cf8ad0b0db3f5486008787e0
|
[
"RSA-MD"
] | null | null | null |
src/strategies.py
|
paulobh/tradingsystem
|
8ba721e4f0f058a5cf8ad0b0db3f5486008787e0
|
[
"RSA-MD"
] | 1
|
2022-01-26T01:36:22.000Z
|
2022-01-26T01:36:22.000Z
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import argparse
import collections
import datetime
import numpy
# Import the backtrader platform
import backtrader as bt
import quantstats
# from args import parse_args
from src import signals
from time import process_time
# Create a Stratey
class MainStrategy(bt.Strategy):
    """Trade a single runtime-configured signal with ATR bracket orders.

    The signal class and its parameters arrive via ``kwargs['signal']``
    (a one-entry mapping ``{SignalClassName: {param: value, ...}}``).  A
    companion ``ATRSignal`` built from the same parameters supplies the
    stop-loss / take-profit levels used by ``buy_bracket``/``sell_bracket``.
    Trading is gated by ``signals.TIMESignal`` (schedule window).
    """

    params = (('plot_entry', True),
              ('plot_exit', True),
              ('limdays', 1), #limit of days of alive orders
              ("printlog", True),
              )

    def __init__(self, **kwargs):
        # Mirror the raw kwargs onto both the params object and the
        # instance namespace so later code can read them from either place.
        self.params_opt = kwargs
        self.params.__dict__.update(kwargs)
        self.__dict__.update(kwargs)
        # allowed_keys = {'args', 'period_rsi', 'threshold_buy', 'threshold_sell'}
        # self.__dict__.update((k, v) for k, v in kwargs.items() if k in allowed_keys)
        # self.tstart = process_time()
        # Portfolio value at strategy start (for later balance reporting).
        self.initial_value = self.broker.getvalue()
        # # Activate the fund mode and set the default value at 100
        # self.broker.set_fundmode(fundmode=True, fundstartval=100.0)
        # self.cash_start = self.broker.get_cash()
        # # self.val_start = 100.0
        # To keep track of pending orders and buy price / commission.
        self.order = None
        self.buyprice = None
        self.buycomm = None
        # Keep a reference to the OHLC line in the data[0] dataseries
        self.dataopen = self.datas[0].open
        self.datahigh = self.datas[0].high
        self.datalow = self.datas[0].low
        self.dataclose = self.datas[0].close
        # Add indicators signals
        self.signal_time = signals.TIMESignal()
        # self.atr = signals.ATRSignal(**kwargs)
        # self.rsi = signals.RSISignal()
        # self.willr = signals.WillRSignal()
        # self.signal = getattr(signals, self.signal)(**kwargs)
        # ``params.signal`` is expected to hold exactly one class-name ->
        # params entry; instantiate that signal and its ATR companion.
        # NOTE(review): assumes kwargs supplied a 'signal' mapping — confirm
        # against the caller that builds this strategy.
        self.signal_name = list(self.params.signal.keys())[0]
        signal_params = list(self.params.signal.values())[0]
        self.__dict__.update({"signal": getattr(signals, self.signal_name)(**signal_params)})
        self.__dict__.update({self.signal_name+"_atr": signals.ATRSignal(**signal_params)})
        # Bookkeeping for bracket orders: refs of alive orders, and the
        # order objects of the current bracket.
        self.orefs = list()
        self.os = list()

    def log(self, txt, dt=None, doprint=True):
        """Logging function for this strategy"""
        # NOTE(review): doprint defaults to True here (unlike the other
        # strategies), so the printlog param is effectively always on —
        # confirm this is intentional.
        if self.params.printlog or doprint:
            dt = dt or self.datas[0].datetime.datetime(0)
            print('%s, %s' % (dt.isoformat(), txt))

    def notify_trade(self, trade):
        """Receives a trade whenever there has been a change in one"""
        if not trade.isclosed:
            return
        self.log('OPERATION PROFIT, REF: %s, PRICE: %.2f, GROSS %.2f' %
                 (trade.ref,
                  trade.price,
                  trade.pnl,
                  ))

    def notify_order(self, order):
        """Receives an order whenever there has been a change in one"""
        # print('{}: Order ref: {} / Type {} / Status {}'.format(
        #     self.data.datetime.date(0),
        #     order.ref,
        #     'Buy' * order.isbuy() or 'Sell',
        #     order.getstatusname()))
        # Check whether an order has enought Margin to call or was Rejected by the Broker
        if order.status in [order.Margin, order.Rejected]:
            self.log('REJECTED ORDER. Type: %s, REF: %s, PRICE: %.2f, SIZE: %.2f' %
                     (order.getstatusname(),
                      order.ref,
                      order.executed.price,
                      order.executed.size,
                      ))
        # Check whether an order is Submitted or Accepted by the Broker
        if order.status in [order.Submitted, order.Accepted]:
            # Order accepted by the broker. Do nothing.
            # self.log('ACCEPTED ORDER. STATUS: %s, Type: %s, REF: %s, PRICE: %.2f, SIZE: %.2f' %
            #          (order.getstatusname(),
            #           'Buy' * order.isbuy() or 'Sell',
            #           order.ref,
            #           order.executed.price,
            #           order.executed.size,
            #           ))
            return
        elif order.status in [order.Cancelled]:
            self.log('CANCEL ORDER. STATUS: %s, Type: %s, REF: %s, PRICE: %.2f, SIZE: %.2f' %
                     (order.getstatusname(),
                      'Buy' * order.isbuy() or 'Sell',
                      order.ref,
                      order.executed.price,
                      order.executed.size,
                      ))
        # Check if an order is completed
        elif order.status in [order.Completed]:
            self.log("COMPLETED ORDER. STATUS: %s, Type: %s, REF: %s, PRICE : %.3f, SIZE : %.2f" %
                     (order.getstatusname(),
                      'Buy' * order.isbuy() or 'Sell',
                      order.ref,
                      order.executed.price,
                      order.executed.size,
                      ))
        # if not order.alive():
        #     self.order = None  # indicate no order is pending
        # Drop the ref of any dead order so next() can send new brackets.
        if not order.alive() and order.ref in self.orefs:
            self.orefs.remove(order.ref)

    def next(self):
        """Simply log the closing price of the series from the reference"""
        # Outside the allowed schedule: flatten/cancel anything alive.
        if self.signal_time.signal[0] == 0:
            if self.orefs:
                self.log('Out of schedule time of operation, closing operation')
                self.close()
                [self.cancel(order) for order in self.os if order.ref in self.orefs]
                self.orefs = list()
                return
            elif self.os:
                self.log('Out of schedule time of operation')
                self.cancel(self.os[0])
                self.orefs = list()
                return
        # Check if an order is in Pending state, if yes, we cannot send a 2nd one
        elif self.orefs:
            self.log('An order already in Pending state')
            return  # pending orders do nothing
        # Check whether we have an open position already, if no, then we can enter a new position by entering a trade
        elif not self.position:
            valid = datetime.timedelta(self.params.limdays)
            # signal_name, signal_value = self.signal_opt()
            if self.signal_time.signal[0] == 0:
                self.log('Out of schedule time of operation')
                return
            # elif signal_value > 0:
            elif self.signal.lines.signal[0] > 0:
                # Long entry: bracket order with ATR-derived stop/profit.
                stop_loss = getattr(self, self.signal_name + "_atr").lines.signal_stopbuy[0]
                take_profit = getattr(self, self.signal_name + "_atr").lines.signal_profit_buy[0]
                # stop_loss = self.atr.lines.signal_stopbuy[0]
                # take_profit = self.atr.lines.signal_profit_buy[0]
                self.os = self.buy_bracket(price=None,
                                           valid=valid,
                                           stopprice=stop_loss,
                                           stopargs=dict(valid=valid),
                                           limitprice=take_profit,
                                           limitargs=dict(valid=valid),
                                           )
                self.orefs = [o.ref for o in self.os]
                # signal_name = [line for line in self.signal.lines.getlinealiases() if line != "signal"][0]
                self.log(
                    'Signal, Close: %.2f, Stop: %.2f, Profit: %.2f, ATR: %.2f, Signal: %.2f, Indicator: %s, Value: %.2f' %
                    (self.dataclose[0],
                     stop_loss,
                     take_profit,
                     # self.atr.lines.signal[0],
                     getattr(self, self.signal_name + "_atr").lines.signal[0],
                     self.signal.lines.signal[0],
                     # signal_value,
                     self.signal_name,
                     getattr(self, "signal")[0])
                    # getattr(self.signal.lines, signal_name)[0])
                )
            # elif signal_value < 0:
            elif self.signal.lines.signal[0] < 0:
                # Short entry: mirror image of the long branch.
                stop_loss = getattr(self, self.signal_name + "_atr").lines.signal_stopsell[0]
                take_profit = getattr(self, self.signal_name + "_atr").lines.signal_profit_sell[0]
                # stop_loss = self.atr.lines.signal_stopsell[0]
                # take_profit = self.atr.lines.signal_profit_sell[0]
                self.os = self.sell_bracket(price=None,
                                            valid=valid,
                                            stopprice=stop_loss,
                                            stopargs=dict(valid=valid),
                                            limitprice=take_profit,
                                            limitargs=dict(valid=valid),
                                            )
                self.orefs = [o.ref for o in self.os]
                # signal_name = [line for line in self.signal.lines.getlinealiases() if line != "signal"][0]
                self.log(
                    'Signal, Close: %.2f, Stop: %.2f, Profit: %.2f, ATR: %.2f, Signal: %.2f, Indicator: %s, Value: %.2f' %
                    (self.dataclose[0],
                     stop_loss,
                     take_profit,
                     # self.atr.lines.signal[0],
                     getattr(self, self.signal_name + "_atr").lines.signal[0],
                     self.signal.lines.signal[0],
                     # signal_value,
                     self.signal_name,
                     getattr(self, "signal")[0])
                    # getattr(self.signal.lines, signal_name)[0])
                )
class TestStrategy(bt.Strategy):
    """Classic SMA-crossover demo strategy.

    Buys when the close rises above a simple moving average while flat,
    sells when it falls below while in the market, and logs every order
    execution and closed trade.
    """

    params = (
        ('maperiod', 15),
        ('printlog', False),
    )

    def __init__(self, **kwargs):
        # Propagate any runtime overrides onto params and the instance.
        self.params_opt = kwargs
        self.params.__dict__.update(kwargs)
        self.__dict__.update(kwargs)
        # Portfolio value at start, for later balance reporting.
        self.initial_value = self.broker.getvalue()
        # Order bookkeeping: the pending order plus last buy price/commission.
        self.order = None
        self.buyprice = None
        self.buycomm = None
        # Shortcuts to the OHLC lines of the primary data feed.
        feed = self.datas[0]
        self.dataopen = feed.open
        self.datahigh = feed.high
        self.datalow = feed.low
        self.dataclose = feed.close
        # The moving average driving entries and exits.
        self.sma = bt.indicators.SimpleMovingAverage(
            feed, period=self.params.maperiod)
        self.orefs = list()

    def log(self, txt, dt=None, doprint=False):
        """Print *txt* prefixed with a date stamp when logging is enabled."""
        if not (self.params.printlog or doprint):
            return
        stamp = dt or self.datas[0].datetime.date(0)
        print('%s, %s' % (stamp.isoformat(), txt))

    def notify_order(self, order):
        """Track the order lifecycle and record execution details."""
        if order.status in (order.Submitted, order.Accepted):
            # Still with the broker - nothing to do yet.
            return
        # The broker may reject an order when there is not enough cash.
        if order.status in (order.Completed,):
            detail = (order.executed.price,
                      order.executed.value,
                      order.executed.comm)
            if order.isbuy():
                self.log('BUY EXECUTED, Price: %.2f, Cost: %.2f, Comm %.2f'
                         % detail)
                self.buyprice = order.executed.price
                self.buycomm = order.executed.comm
            else:  # Sell
                self.log('SELL EXECUTED, Price: %.2f, Cost: %.2f, Comm %.2f'
                         % detail)
            self.bar_executed = len(self)
        elif order.status in (order.Canceled, order.Margin, order.Rejected):
            self.log('Order Canceled/Margin/Rejected')
        # Either way, no order is pending any more.
        self.order = None

    def notify_trade(self, trade):
        """Report gross/net profit once a trade is fully closed."""
        if not trade.isclosed:
            return
        self.log('OPERATION PROFIT, GROSS %.2f, NET %.2f'
                 % (trade.pnl, trade.pnlcomm))

    def next(self):
        """Log every close and trade the SMA crossover."""
        self.log('Close, %.2f' % self.dataclose[0])
        if self.order:
            return  # an order is already in flight; never stack a second one
        in_market = bool(self.position)
        price, sma_now = self.dataclose[0], self.sma[0]
        if not in_market and price > sma_now:
            # Crossed above the average: go long.
            self.log('BUY CREATE, %.2f' % price)
            self.order = self.buy()
        elif in_market and price < sma_now:
            # Crossed below while holding: exit.
            self.log('SELL CREATE, %.2f' % price)
            self.order = self.sell()

    def stop(self):
        """Report the final portfolio value for this MA period."""
        self.log('(MA Period %2d) Ending Value %.2f'
                 % (self.params.maperiod, self.broker.getvalue()), doprint=True)
class OptStrategy(bt.Strategy):
    """Optimization variant: trades one signal class named in kwargs.

    ``kwargs['signal']`` holds the *name* of a class in the ``signals``
    module; it is instantiated with the full kwargs.  ATR stop/profit
    levels drive bracket orders, and ``stop()`` reports the parameter set
    plus the final balance for the optimisation run.
    """

    params = (
        ('limdays', 1), #limit of days of alive orders
        ('printlog', True), #very useful to debugging
        # ('signal', None),
    )

    def __init__(self, **kwargs):
        # Mirror kwargs onto params and the instance namespace; this is
        # what makes the ``self.signal`` name lookup below work.
        self.params_opt = kwargs
        self.params.__dict__.update(kwargs)
        self.__dict__.update(kwargs)
        # allowed_keys = {'args', 'period_rsi', 'threshold_buy', 'threshold_sell'}
        # self.__dict__.update((k, v) for k, v in kwargs.items() if k in allowed_keys)
        # self.tstart = process_time()
        self.initial_value = self.broker.getvalue()
        # Activate the fund mode and set the default value at 100
        self.broker.set_fundmode(fundmode=True, fundstartval=100.0)
        self.cash_start = self.broker.get_cash()
        # self.val_start = 100.0
        # To keep track of pending orders and buy price / commission.
        self.order = None
        self.buyprice = None
        self.buycomm = None
        # Keep a reference to the OHLC line in the data[0] dataseries
        self.dataopen = self.datas[0].open
        self.datahigh = self.datas[0].high
        self.datalow = self.datas[0].low
        self.dataclose = self.datas[0].close
        # Add indicators signals
        self.time_signal = signals.TIMESignal(**kwargs)
        self.atr = signals.ATRSignal(**kwargs)
        # self.signal arrives as a string via kwargs and is replaced here
        # by the instantiated signal object of that class.
        self.signal = getattr(signals, self.signal)(**kwargs)
        self.orefs = list()
        self.os = list()

    def log(self, txt, dt=None, doprint=False):
        """Logging function for this strategy"""
        if self.params.printlog or doprint:
            dt = dt or self.datas[0].datetime.datetime(0)
            print('%s, %s' % (dt.isoformat(), txt))

    def notify_trade(self, trade):
        """Receives a trade whenever there has been a change in one"""
        if not trade.isclosed:
            return
        self.log('OPERATION PROFIT, REF: %s, PRICE: %.2f, GROSS %.2f' %
                 (trade.ref,
                  trade.price,
                  trade.pnl,
                  ))

    def notify_order(self, order):
        """Receives an order whenever there has been a change in one"""
        # Check whether an order has enought Margin to call or was Rejected by the Broker
        if order.status in [order.Margin, order.Rejected]:
            self.log('REJECTED ORDER. Type: %s, REF: %s, PRICE: %.2f, SIZE: %.2f' %
                     (order.getstatusname(),
                      order.ref,
                      order.executed.price,
                      order.executed.size,
                      ))
        # Check whether an order is Submitted or Accepted by the Broker
        if order.status in [order.Submitted, order.Accepted]:
            return
        elif order.status in [order.Cancelled]:
            self.log('CANCEL ORDER. STATUS: %s, Type: %s, REF: %s, PRICE: %.2f, SIZE: %.2f' %
                     (order.getstatusname(),
                      'Buy' * order.isbuy() or 'Sell',
                      order.ref,
                      order.executed.price,
                      order.executed.size,
                      ))
        # Check if an order is completed
        elif order.status in [order.Completed]:
            self.log("COMPLETED ORDER. STATUS: %s, Type: %s, REF: %s, PRICE : %.3f, SIZE : %.2f" %
                     (order.getstatusname(),
                      'Buy' * order.isbuy() or 'Sell',
                      order.ref,
                      order.executed.price,
                      order.executed.size,
                      ))
        # Forget dead orders and clear the pending-order marker.
        if not order.alive() and order.ref in self.orefs:
            self.orefs.remove(order.ref)
        self.order = None  # indicate no order is pending

    def next(self):
        """Simply log the closing price of the series from the reference"""
        # self.log('Datetime: %s, Open: %.2f, High: %.2f, Low: %.2f, Close: %.2f' %
        #          (self.data.datetime.datetime(0),
        #           self.dataopen[0],
        #           self.datahigh[0],
        #           self.datalow[0],
        #           self.dataclose[0]))
        # Outside the schedule window: flatten/cancel anything alive.
        if self.time_signal.signal[0] == 0:
            if self.orefs:
                self.log('Out of schedule time of operation, closing operation')
                self.close()
                [self.cancel(order) for order in self.os if order.ref in self.orefs]
                self.orefs = list()
                return
            elif self.os:
                self.log('Out of schedule time of operation')
                self.cancel(self.os[0])
                self.orefs = list()
                return
        # Check if an order is in Pending state, if yes, we cannot send a 2nd one
        # if self.order:
        #     print('An order already in Pending state')
        #     return
        elif self.orefs:
            self.log('An order already in Pending state')
            return  # pending orders do nothing
        # Check whether we have an open position already, if no, then we can enter a new position by entering a trade
        elif not self.position:
            valid = datetime.timedelta(self.params.limdays)
            if self.time_signal.signal[0] == 0:
                self.log('Out of schedule time of operation')
                return
            elif self.signal.lines.signal[0] > 0:
                stop_loss = self.atr.lines.signal_stopbuy[0]
                take_profit = self.atr.lines.signal_profit_buy[0]
                self.os = self.buy_bracket(price=None,
                                           valid=valid,
                                           stopprice=stop_loss,
                                           stopargs=dict(valid=valid),
                                           limitprice=take_profit,
                                           limitargs=dict(valid=valid),
                                           )
                self.orefs = [o.ref for o in self.os]
                signal_name = [line for line in self.signal.lines.getlinealiases() if line != "signal"][0]
                self.log(
                    'Signal, Close: %.2f, Stop: %.2f, Profit: %.2f, ATR: %.2f, Signal: %.2f, Indicator: %s, Value: %.2f' %
                    (self.dataclose[0],
                     stop_loss,
                     take_profit,
                     self.atr.lines.signal[0],
                     self.signal.lines.signal[0],
                     signal_name,
                     getattr(self.signal.lines, signal_name)[0])
                )
            elif self.signal.lines.signal[0] < 0:
                stop_loss = self.atr.lines.signal_stopsell[0]
                take_profit = self.atr.lines.signal_profit_sell[0]
                # NOTE(review): the valid/stopargs/limitargs kwargs are
                # commented out here but active in the buy_bracket above,
                # so short brackets never expire after limdays — confirm
                # this asymmetry is intentional.
                self.os = self.sell_bracket(price=None,
                                            # valid=valid,
                                            stopprice=stop_loss,
                                            # stopargs=dict(valid=valid),
                                            limitprice=take_profit,
                                            # limitargs=dict(valid=valid),
                                            )
                self.orefs = [o.ref for o in self.os]
                signal_name = [line for line in self.signal.lines.getlinealiases() if line != "signal"][0]
                self.log('Signal, Close: %.2f, Stop: %.2f, Profit: %.2f, ATR: %.2f, Signal: %.2f, Indicator: %s, Value: %.2f' %
                         (self.dataclose[0],
                          stop_loss,
                          take_profit,
                          self.atr.lines.signal[0],
                          self.signal.lines.signal[0],
                          signal_name,
                          getattr(self.signal.lines, signal_name)[0])
                         )

    def stop(self):
        """Report the parameter set and final balance for this run."""
        # self.tend = process_time()
        # time_spent = self.tend - self.tstart
        # print('Time used:', str(time_spent))
        self.final_value = self.broker.getvalue()
        balance = self.final_value - self.initial_value
        balancecash = self.broker.getcash() - self.initial_value
        # self.roi = (self.broker.get_value() / self.cash_start) - 1.0
        # self.froi = self.broker.get_fundvalue() - self.val_start
        # params_pop = ['plot_entry', 'plot_exit', 'limdays', 'printlog']
        # Strip plotting/housekeeping params so only tunables are reported.
        params_pop = ['plot_entry', 'plot_exit', 'limdays']
        [self.params_opt.pop(param) for param in params_pop if param in self.params_opt]
        self.log(
            f'Parameters used: {self.params_opt}| '
            # f'ROI: {100.0 * self.roi}| '
            # f'Fund Value {self.froi}| '
            f'Ending Value: {balance}',
            doprint=True)
class SignalsStrategy(bt.Strategy):
    """Trade the best-scoring signal among several pre-trained ones.

    ``kwargs['output_train']`` maps signal class names to their trained
    parameter dicts; one indicator plus a companion ``ATRSignal`` (for
    stop/take-profit levels) is attached per entry as attributes
    ``signal_<name>`` and ``signal_<name>_atr``.  On each bar,
    :meth:`signal_opt` picks the signal with the highest analyzer score
    (the analyzer name comes from ``params.opt_analyzer``), and positions
    are opened with bracket orders.  ``signals.TIMESignal`` gates trading
    to the allowed schedule.
    """

    params = (('plot_entry', True),
              ('plot_exit', True),
              ('printlog', False),
              ('limdays', 1),  # limit of days of alive orders
              )

    def __init__(self, **kwargs):
        # Mirror runtime kwargs onto params and the instance namespace.
        self.params_opt = kwargs
        self.params.__dict__.update(kwargs)
        self.__dict__.update(kwargs)
        # Starting portfolio value, used by stop() to report the balance.
        self.initial_value = self.broker.getvalue()
        # To keep track of pending orders and buy price / commission.
        self.order = None
        self.buyprice = None
        self.buycomm = None
        # Keep a reference to the OHLC lines in the data[0] dataseries.
        self.dataopen = self.datas[0].open
        self.datahigh = self.datas[0].high
        self.datalow = self.datas[0].low
        self.dataclose = self.datas[0].close
        # Schedule gate.
        self.signal_time = signals.TIMESignal()
        # One indicator + ATR companion per trained signal entry.
        for signal, signal_params in getattr(self.params, "output_train").items():
            signal_name = "signal_" + signal.split("Signal")[0].lower()
            signal_params = getattr(self.params, "output_train").get(signal)
            self.__dict__.update({signal_name: getattr(signals, signal)(**signal_params)})
            self.__dict__.update({signal_name + "_atr": signals.ATRSignal(**signal_params)})
        self.orefs = list()  # refs of alive (pending) orders
        self.os = list()     # order objects of the current bracket

    def log(self, txt, dt=None, doprint=False):
        """Logging function for this strategy"""
        if self.params.printlog or doprint:
            dt = dt or self.datas[0].datetime.datetime(0)
            print('%s, %s' % (dt.isoformat(), txt))

    def notify_trade(self, trade):
        """Receives a trade whenever there has been a change in one"""
        if not trade.isclosed:
            return
        self.log('OPERATION PROFIT, REF: %s, PRICE: %.2f, GROSS %.2f' %
                 (trade.ref,
                  trade.price,
                  trade.pnl,
                  ))

    def notify_order(self, order):
        """Receives an order whenever there has been a change in one"""
        # Check whether an order has enough Margin to call or was Rejected by the Broker
        if order.status in [order.Margin, order.Rejected]:
            self.log('REJECTED ORDER. Type: %s, REF: %s, PRICE: %.2f, SIZE: %.2f' %
                     (order.getstatusname(),
                      order.ref,
                      order.executed.price,
                      order.executed.size,
                      ))
        # Check whether an order is Submitted or Accepted by the Broker
        if order.status in [order.Submitted, order.Accepted]:
            # Order accepted by the broker. Do nothing.
            return
        elif order.status in [order.Cancelled]:
            self.log('CANCEL ORDER. STATUS: %s, Type: %s, REF: %s, PRICE: %.2f, SIZE: %.2f' %
                     (order.getstatusname(),
                      'Buy' * order.isbuy() or 'Sell',
                      order.ref,
                      order.executed.price,
                      order.executed.size,
                      ))
        # Check if an order is completed
        elif order.status in [order.Completed]:
            self.log("COMPLETED ORDER. STATUS: %s, Type: %s, REF: %s, PRICE : %.3f, SIZE : %.2f" %
                     (order.getstatusname(),
                      'Buy' * order.isbuy() or 'Sell',
                      order.ref,
                      order.executed.price,
                      order.executed.size,
                      ))
        # Forget dead orders so next() can send new brackets.
        if not order.alive() and order.ref in self.orefs:
            self.orefs.remove(order.ref)

    def signal_opt(self):
        """Pick the attached signal with the highest analyzer score.

        Scores are read from each signal's ``analyzer_opt`` params, keyed
        by the analyzer named in ``self.params.opt_analyzer``.  Returns a
        tuple ``(attribute_name, current_signal_value)``; raises
        AttributeError if no signal has a positive score.
        """
        signals_all = {key: value for key, value in self.__dict__.items() if "signal_" in key}
        signals_atr = {key: value for key, value in signals_all.items() if "_atr" in key}
        signals_names = [key.split("_atr")[0] for key in signals_atr.keys()]
        # Renamed local from `signals` to avoid shadowing the imported module.
        candidates = {key: value for key, value in signals_all.items() if key in signals_names}
        best_score = 0
        best_score_signal = ""
        analyzer_opt_name = self.params.opt_analyzer.get("analyzer_opt")
        for key, value in candidates.items():
            analyzer_opt_weight = value.params.analyzer_opt.get(analyzer_opt_name)
            if analyzer_opt_weight > best_score:
                # BUG FIX: track the running maximum.  Previously best_score
                # was never updated, so every positive-score signal passed
                # the test and the *last* one won instead of the best one.
                best_score = analyzer_opt_weight
                best_score_signal = key
        return best_score_signal, getattr(self, best_score_signal).lines.signal[0]

    def next(self):
        """Gate by schedule, then trade the best-scoring signal."""
        # Outside the schedule window: flatten/cancel anything alive.
        if self.signal_time.signal[0] == 0:
            if self.orefs:
                self.log('Out of schedule time of operation, closing operation')
                self.close()
                [self.cancel(order) for order in self.os if order.ref in self.orefs]
                self.orefs = list()
                return
            elif self.os:
                self.log('Out of schedule time of operation')
                self.cancel(self.os[0])
                self.orefs = list()
                return
        # Check if an order is in Pending state, if yes, we cannot send a 2nd one
        elif self.orefs:
            self.log('An order already in Pending state')
            return  # pending orders do nothing
        # Check whether we have an open position already, if no, then we can enter a new position by entering a trade
        elif not self.position:
            valid = datetime.timedelta(self.params.limdays)
            signal_name, signal_value = self.signal_opt()
            if self.signal_time.signal[0] == 0:
                self.log('Out of schedule time of operation')
                return
            elif signal_value > 0:
                # Long entry: bracket order with ATR-derived stop/profit.
                stop_loss = getattr(self, signal_name + "_atr").lines.signal_stopbuy[0]
                take_profit = getattr(self, signal_name + "_atr").lines.signal_profit_buy[0]
                self.os = self.buy_bracket(price=None,
                                           valid=valid,
                                           stopprice=stop_loss,
                                           stopargs=dict(valid=valid),
                                           limitprice=take_profit,
                                           limitargs=dict(valid=valid),
                                           )
                self.orefs = [o.ref for o in self.os]
                self.log(
                    'Signal, Close: %.2f, Stop: %.2f, Profit: %.2f, ATR: %.2f, Signal: %.2f, Indicator: %s, Value: %.2f' %
                    (self.dataclose[0],
                     stop_loss,
                     take_profit,
                     getattr(self, signal_name + "_atr").lines.signal[0],
                     signal_value,
                     signal_name,
                     getattr(self, signal_name)[0])
                )
            elif signal_value < 0:
                # Short entry: mirror image of the long branch.
                stop_loss = getattr(self, signal_name + "_atr").lines.signal_stopsell[0]
                take_profit = getattr(self, signal_name + "_atr").lines.signal_profit_sell[0]
                self.os = self.sell_bracket(price=None,
                                            valid=valid,
                                            stopprice=stop_loss,
                                            stopargs=dict(valid=valid),
                                            limitprice=take_profit,
                                            limitargs=dict(valid=valid),
                                            )
                self.orefs = [o.ref for o in self.os]
                self.log(
                    'Signal, Close: %.2f, Stop: %.2f, Profit: %.2f, ATR: %.2f, Signal: %.2f, Indicator: %s, Value: %.2f' %
                    (self.dataclose[0],
                     stop_loss,
                     take_profit,
                     getattr(self, signal_name + "_atr").lines.signal[0],
                     signal_value,
                     signal_name,
                     getattr(self, signal_name)[0])
                )

    def stop(self):
        """Report the final balance at the end of the run."""
        self.final_value = self.broker.getvalue()
        balance = self.final_value - self.initial_value
        balancecash = self.broker.getcash() - self.initial_value
        self.log(
            f'Ending Value: {balance}',
            doprint=True)
class BuyHoldStrategy(bt.Strategy):
    """Benchmark buy-and-hold driven by a single named signal.

    The signal class name comes from ``kwargs['signal_strategy']`` and is
    instantiated with the fromdate/todate window of the current
    optimisation phase (``kwargs[opt_type]``).  Buys on the first positive
    signal while flat; sells on a negative signal while in the market.
    """

    params = (('plot_entry', True),
              ('plot_exit', True),
              ('printlog', True),
              ('limdays', 1),  # limit of days of alive orders
              )

    def __init__(self, **kwargs):
        # Mirror kwargs onto params and the instance namespace.
        self.params_opt = kwargs
        self.params.__dict__.update(kwargs)
        self.__dict__.update(kwargs)
        # allowed_keys = {'args', 'period_rsi', 'threshold_buy', 'threshold_sell'}
        # self.__dict__.update((k, v) for k, v in kwargs.items() if k in allowed_keys)
        # self.tstart = process_time()
        self.initial_value = self.broker.getvalue()
        # Activate the fund mode and set the default value at 100
        self.broker.set_fundmode(fundmode=True, fundstartval=100.0)
        self.cash_start = self.broker.get_cash()
        # self.val_start = 100.0
        # To keep track of pending orders and buy price / commission.
        self.order = None
        self.buyprice = None
        self.buycomm = None
        # Keep a reference to the OHLC line in the data[0] dataseries
        self.dataopen = self.datas[0].open
        self.datahigh = self.datas[0].high
        self.datalow = self.datas[0].low
        self.dataclose = self.datas[0].close
        # Add indicators signals
        # self.signal_time = signals.TIMESignal()
        # Build the date window for the current phase (e.g. train/test)
        # from the kwargs entry named by 'opt_type'.
        opt_type = self.params_opt.get("opt_type")
        signal_params = {
            "fromdate": self.params_opt.get(opt_type).get("fromdate"),
            "todate": self.params_opt.get(opt_type).get("todate")
        }
        # Instantiate "<name>Signal" from the signals module and attach it
        # as attribute "signal_<name>".
        signal = self.params_opt.get("signal_strategy")
        self.signal_name = "signal_" + signal.lower()
        self.__dict__.update({self.signal_name: getattr(signals, "".join([signal, "Signal"]))(**signal_params)})
        self.orefs = list()
        self.os = list()

    def log(self, txt, dt=None, doprint=False):
        """Logging function for this strategy"""
        if self.params.printlog or doprint:
            dt = dt or self.datas[0].datetime.datetime(0)
            print('%s, %s' % (dt.isoformat(), txt))

    def notify_trade(self, trade):
        """Receives a trade whenever there has been a change in one"""
        if not trade.isclosed:
            return
        self.log('OPERATION PROFIT, REF: %s, PRICE: %.2f, GROSS %.2f' %
                 (trade.ref,
                  trade.price,
                  trade.pnl,
                  ))

    def notify_order(self, order):
        """Receives an order whenever there has been a change in one"""
        # Check whether an order has enough Margin to call or was Rejected by the Broker
        if order.status in [order.Margin, order.Rejected]:
            self.log('REJECTED ORDER. Type: %s, REF: %s, PRICE: %.2f, SIZE: %.2f' %
                     (order.getstatusname(),
                      order.ref,
                      order.executed.price,
                      order.executed.size,
                      ))
        # Check whether an order is Submitted or Accepted by the Broker
        if order.status in [order.Submitted, order.Accepted]:
            return
        elif order.status in [order.Cancelled]:
            self.log('CANCEL ORDER. STATUS: %s, Type: %s, REF: %s, PRICE: %.2f, SIZE: %.2f' %
                     (order.getstatusname(),
                      'Buy' * order.isbuy() or 'Sell',
                      order.ref,
                      order.executed.price,
                      order.executed.size,
                      ))
        # Check if an order is completed
        elif order.status in [order.Completed]:
            self.log("COMPLETED ORDER. STATUS: %s, Type: %s, REF: %s, PRICE : %.3f, SIZE : %.2f" %
                     (order.getstatusname(),
                      'Buy' * order.isbuy() or 'Sell',
                      order.ref,
                      order.executed.price,
                      order.executed.size,
                      ))
        # Forget dead orders so next() can send new ones.
        if not order.alive() and order.ref in self.orefs:
            self.orefs.remove(order.ref)

    def next(self):
        """Simply log the closing price of the series from the reference"""
        # Check if an order is in Pending state, if yes, we cannot send a 2nd one
        if self.orefs:
            self.log('An order already in Pending state')
            return  # pending orders do nothing
        # Check whether we have an open position already, if no, then we can enter a new position by entering a trade
        elif not self.position:
            # self.signal is bound lazily here; the in-position branch below
            # relies on this having run at least once (which it has, since a
            # position can only exist after a buy from this branch).
            self.signal = getattr(self, self.signal_name)
            # valid = datetime.timedelta(self.params.limdays)
            if self.signal.lines.signal[0] > 0:
                self.os = self.buy()
                # self.orefs = [o.ref for o in self.os]
                self.log(
                    'Signal, Close: %.2f, Signal: %.2f, Indicator: %s' %
                    (self.dataclose[0],
                     self.signal.lines.signal[0],
                     self.signal_name)
                )
        elif self.position:
            if self.signal.lines.signal[0] < 0:
                self.os = self.sell()
                # self.orefs = [o.ref for o in self.os]
                self.log('Signal, Close: %.2f,Signal: %.2f, Indicator: %s' %
                         (self.dataclose[0],
                          self.signal.lines.signal[0],
                          self.signal_name)
                         )
| 42.333333
| 127
| 0.518774
| 4,467
| 39,497
| 4.486009
| 0.064025
| 0.034932
| 0.020959
| 0.023055
| 0.896003
| 0.877938
| 0.867209
| 0.858127
| 0.839563
| 0.833475
| 0
| 0.012895
| 0.36975
| 39,497
| 932
| 128
| 42.378755
| 0.79211
| 0.251336
| 0
| 0.790017
| 0
| 0.030981
| 0.100697
| 0.00082
| 0
| 0
| 0
| 0
| 0
| 1
| 0.049914
| false
| 0
| 0.015491
| 0
| 0.125645
| 0.041308
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e2cdf73d88999dacac20f9f2fe3e98cbda317f44
| 168
|
py
|
Python
|
wgm/cli/do/__init__.py
|
Skylaski-VPN/WireGuard-Gateway-Manager
|
5cacbbc2318fdf662cd4793a786e3c7b9b74c5c4
|
[
"MIT"
] | 1
|
2021-11-28T21:26:58.000Z
|
2021-11-28T21:26:58.000Z
|
wgm/cli/do/__init__.py
|
Skylaski-VPN/WireGuard-Gateway-Manager
|
5cacbbc2318fdf662cd4793a786e3c7b9b74c5c4
|
[
"MIT"
] | 2
|
2021-04-07T18:10:07.000Z
|
2021-04-07T21:41:35.000Z
|
wgm/cli/do/__init__.py
|
Skylaski-VPN/WireGuard-Gateway-Manager
|
5cacbbc2318fdf662cd4793a786e3c7b9b74c5c4
|
[
"MIT"
] | 2
|
2021-04-07T16:13:55.000Z
|
2021-04-23T18:33:22.000Z
|
# do
from .do import get_do_droplets
from .do import get_do_images
from .do import get_do_regions
from .do import create_do_droplet
from .do import destroy_do_droplet
| 21
| 34
| 0.827381
| 31
| 168
| 4.16129
| 0.322581
| 0.232558
| 0.465116
| 0.348837
| 0.395349
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136905
| 168
| 7
| 35
| 24
| 0.889655
| 0.011905
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
39257cc1bf70aa5becedfbd088e2fe2cef0ae63a
| 117,416
|
py
|
Python
|
napalm_yang/models/openconfig/network_instances/network_instance/__init__.py
|
ckishimo/napalm-yang
|
8f2bd907bd3afcde3c2f8e985192de74748baf6c
|
[
"Apache-2.0"
] | 64
|
2016-10-20T15:47:18.000Z
|
2021-11-11T11:57:32.000Z
|
napalm_yang/models/openconfig/network_instances/network_instance/__init__.py
|
ckishimo/napalm-yang
|
8f2bd907bd3afcde3c2f8e985192de74748baf6c
|
[
"Apache-2.0"
] | 126
|
2016-10-05T10:36:14.000Z
|
2019-05-15T08:43:23.000Z
|
napalm_yang/models/openconfig/network_instances/network_instance/__init__.py
|
ckishimo/napalm-yang
|
8f2bd907bd3afcde3c2f8e985192de74748baf6c
|
[
"Apache-2.0"
] | 63
|
2016-11-07T15:23:08.000Z
|
2021-09-22T14:41:16.000Z
|
# -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
from . import fdb
from . import config
from . import state
from . import encapsulation
from . import inter_instance_policies
from . import table_connections
from . import interfaces
from . import tables
from . import connection_points
from . import mpls
from . import segment_routing
from . import vlans
from . import policy_forwarding
from . import afts
from . import protocols
class network_instance(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Network instances configured on the local system
"""
__slots__ = (
"_path_helper",
"_extmethods",
"__name",
"__fdb",
"__config",
"__state",
"__encapsulation",
"__inter_instance_policies",
"__table_connections",
"__interfaces",
"__tables",
"__connection_points",
"__mpls",
"__segment_routing",
"__vlans",
"__policy_forwarding",
"__afts",
"__protocols",
)
_yang_name = "network-instance"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
    """Build the default (empty) child nodes for this network-instance.

    Optionally copies changed elements from a single positional source
    object; ``load=...`` in kwargs is forwarded to each setter.
    """
    self._path_helper = False
    self._extmethods = False

    # Keyword arguments common to every child node constructed below.
    shared = dict(
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        is_config=True,
    )

    def _container(cls, yname):
        # Build one default (empty) child container wrapper.
        return YANGDynClass(
            base=cls,
            is_container="container",
            yang_name=yname,
            extensions=None,
            yang_type="container",
            **shared
        )

    # The list key: a leafref naming this network instance.
    self.__name = YANGDynClass(
        base=six.text_type,
        is_leaf=True,
        yang_name="name",
        is_keyval=True,
        yang_type="leafref",
        **shared
    )

    self.__fdb = _container(fdb.fdb, "fdb")
    self.__config = _container(config.config, "config")
    self.__state = _container(state.state, "state")
    self.__encapsulation = _container(encapsulation.encapsulation, "encapsulation")
    self.__inter_instance_policies = _container(
        inter_instance_policies.inter_instance_policies, "inter-instance-policies"
    )
    self.__table_connections = _container(
        table_connections.table_connections, "table-connections"
    )
    self.__interfaces = _container(interfaces.interfaces, "interfaces")
    self.__tables = _container(tables.tables, "tables")
    self.__connection_points = _container(
        connection_points.connection_points, "connection-points"
    )
    self.__mpls = _container(mpls.mpls, "mpls")
    self.__segment_routing = _container(
        segment_routing.segment_routing, "segment-routing"
    )
    self.__vlans = _container(vlans.vlans, "vlans")
    self.__policy_forwarding = _container(
        policy_forwarding.policy_forwarding, "policy-forwarding"
    )
    self.__afts = _container(afts.afts, "afts")
    self.__protocols = _container(protocols.protocols, "protocols")

    load = kwargs.pop("load", None)
    if args:
        if len(args) > 1:
            raise TypeError("cannot create a YANG container with >1 argument")
        source = args[0]
        # The supplied object must expose every pyangbind element.
        # all() short-circuits on the first missing attribute.
        if not all(hasattr(source, e) for e in self._pyangbind_elements):
            raise ValueError("Supplied object did not have the correct attributes")
        for e in self._pyangbind_elements:
            nobj = getattr(source, e)
            if nobj._changed() is False:
                continue
            setmethod = getattr(self, "_set_%s" % e)
            if load is None:
                setmethod(nobj)
            else:
                setmethod(nobj, load=load)
def _path(self):
    """Return the schema path of this node as a list of path components."""
    try:
        parent = self._parent
    except AttributeError:
        # Not attached to a parent: this node is addressed from the root.
        return ["network-instances", "network-instance"]
    return parent._path() + [self._yang_name]
def _get_name(self):
    """
    Getter method for name, mapped from YANG variable /network_instances/network_instance/name (leafref)

    YANG Description: A unique name identifying the network instance
    """
    return self.__name

def _set_name(self, v, load=False):
    """
    Setter method for name, mapped from YANG variable /network_instances/network_instance/name (leafref)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_name is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_name() directly.

    YANG Description: A unique name identifying the network instance
    """
    # name is the list key: it may not be changed on an entry that is
    # already attached to an instantiated list (unless loading).
    parent = getattr(self, "_parent", None)
    if parent is not None and load is False:
        raise AttributeError(
            "Cannot set keys directly when" + " within an instantiated list"
        )

    # Unwrap a value previously wrapped by YANGDynClass before re-validating.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(
            v,
            base=six.text_type,
            is_leaf=True,
            yang_name="name",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            is_keyval=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="leafref",
            is_config=True,
        )
    except (TypeError, ValueError):
        # Surface a structured error describing the expected YANG type.
        raise ValueError(
            {
                "error-string": """name must be of a type compatible with leafref""",
                "defined-type": "leafref",
                "generated-type": """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=True)""",
            }
        )

    self.__name = t
    # Notify pyangbind of the change when the hook is present.
    if hasattr(self, "_set"):
        self._set()

def _unset_name(self):
    # Reset to a fresh default leaf, mirroring the __init__ construction.
    self.__name = YANGDynClass(
        base=six.text_type,
        is_leaf=True,
        yang_name="name",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        is_keyval=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="leafref",
        is_config=True,
    )
def _get_fdb(self):
    """
    Getter method for fdb, mapped from YANG variable /network_instances/network_instance/fdb (container)

    YANG Description: Operational state and configuration parameters relating to
    the forwarding database of the network instance
    """
    return self.__fdb

def _set_fdb(self, v, load=False):
    """
    Setter method for fdb, mapped from YANG variable /network_instances/network_instance/fdb (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_fdb is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_fdb() directly.

    YANG Description: Operational state and configuration parameters relating to
    the forwarding database of the network instance
    """
    # Unwrap a value previously wrapped by YANGDynClass before re-validating.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(
            v,
            base=fdb.fdb,
            is_container="container",
            yang_name="fdb",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=True,
        )
    except (TypeError, ValueError):
        # Surface a structured error describing the expected YANG type.
        raise ValueError(
            {
                "error-string": """fdb must be of a type compatible with container""",
                "defined-type": "container",
                "generated-type": """YANGDynClass(base=fdb.fdb, is_container='container', yang_name="fdb", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
            }
        )

    self.__fdb = t
    # Notify pyangbind of the change when the hook is present.
    if hasattr(self, "_set"):
        self._set()

def _unset_fdb(self):
    # Reset to a fresh default container, mirroring the __init__ construction.
    self.__fdb = YANGDynClass(
        base=fdb.fdb,
        is_container="container",
        yang_name="fdb",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        extensions=None,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="container",
        is_config=True,
    )
def _get_config(self):
    """
    Getter method for config, mapped from YANG variable /network_instances/network_instance/config (container)

    YANG Description: Configuration parameters relating to a network
    instance
    """
    return self.__config

def _set_config(self, v, load=False):
    """
    Setter method for config, mapped from YANG variable /network_instances/network_instance/config (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_config is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config() directly.

    YANG Description: Configuration parameters relating to a network
    instance
    """
    # Unwrap a value previously wrapped by YANGDynClass before re-validating.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(
            v,
            base=config.config,
            is_container="container",
            yang_name="config",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=True,
        )
    except (TypeError, ValueError):
        # Surface a structured error describing the expected YANG type.
        raise ValueError(
            {
                "error-string": """config must be of a type compatible with container""",
                "defined-type": "container",
                "generated-type": """YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
            }
        )

    self.__config = t
    # Notify pyangbind of the change when the hook is present.
    if hasattr(self, "_set"):
        self._set()

def _unset_config(self):
    # Reset to a fresh default container, mirroring the __init__ construction.
    self.__config = YANGDynClass(
        base=config.config,
        is_container="container",
        yang_name="config",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        extensions=None,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="container",
        is_config=True,
    )
def _get_state(self):
    """
    Getter method for state, mapped from YANG variable /network_instances/network_instance/state (container)

    YANG Description: Operational state parameters relating to a network
    instance
    """
    return self.__state

def _set_state(self, v, load=False):
    """
    Setter method for state, mapped from YANG variable /network_instances/network_instance/state (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_state is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_state() directly.

    YANG Description: Operational state parameters relating to a network
    instance
    """
    # Unwrap a value previously wrapped by YANGDynClass before re-validating.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(
            v,
            base=state.state,
            is_container="container",
            yang_name="state",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=True,
        )
    except (TypeError, ValueError):
        # Surface a structured error describing the expected YANG type.
        raise ValueError(
            {
                "error-string": """state must be of a type compatible with container""",
                "defined-type": "container",
                "generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
            }
        )

    self.__state = t
    # Notify pyangbind of the change when the hook is present.
    if hasattr(self, "_set"):
        self._set()

def _unset_state(self):
    # Reset to a fresh default container, mirroring the __init__ construction.
    self.__state = YANGDynClass(
        base=state.state,
        is_container="container",
        yang_name="state",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        extensions=None,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="container",
        is_config=True,
    )
def _get_encapsulation(self):
    """
    Getter method for encapsulation, mapped from YANG variable /network_instances/network_instance/encapsulation (container)

    YANG Description: Configuration parameters relating to the encapsulation
    used for the network instance
    """
    return self.__encapsulation

def _set_encapsulation(self, v, load=False):
    """
    Setter method for encapsulation, mapped from YANG variable /network_instances/network_instance/encapsulation (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_encapsulation is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_encapsulation() directly.

    YANG Description: Configuration parameters relating to the encapsulation
    used for the network instance
    """
    # Unwrap a value previously wrapped by YANGDynClass before re-validating.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(
            v,
            base=encapsulation.encapsulation,
            is_container="container",
            yang_name="encapsulation",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=True,
        )
    except (TypeError, ValueError):
        # Surface a structured error describing the expected YANG type.
        raise ValueError(
            {
                "error-string": """encapsulation must be of a type compatible with container""",
                "defined-type": "container",
                "generated-type": """YANGDynClass(base=encapsulation.encapsulation, is_container='container', yang_name="encapsulation", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
            }
        )

    self.__encapsulation = t
    # Notify pyangbind of the change when the hook is present.
    if hasattr(self, "_set"):
        self._set()

def _unset_encapsulation(self):
    # Reset to a fresh default container, mirroring the __init__ construction.
    self.__encapsulation = YANGDynClass(
        base=encapsulation.encapsulation,
        is_container="container",
        yang_name="encapsulation",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        extensions=None,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="container",
        is_config=True,
    )
def _get_inter_instance_policies(self):
    """
    Getter method for inter_instance_policies, mapped from YANG variable /network_instances/network_instance/inter_instance_policies (container)

    YANG Description: Policies dictating how RIB or FIB entries are imported
    to and exported from this instance
    """
    return self.__inter_instance_policies

def _set_inter_instance_policies(self, v, load=False):
    """
    Setter method for inter_instance_policies, mapped from YANG variable /network_instances/network_instance/inter_instance_policies (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_inter_instance_policies is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_inter_instance_policies() directly.

    YANG Description: Policies dictating how RIB or FIB entries are imported
    to and exported from this instance
    """
    # Unwrap a value previously wrapped by YANGDynClass before re-validating.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(
            v,
            base=inter_instance_policies.inter_instance_policies,
            is_container="container",
            yang_name="inter-instance-policies",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=True,
        )
    except (TypeError, ValueError):
        # Surface a structured error describing the expected YANG type.
        raise ValueError(
            {
                "error-string": """inter_instance_policies must be of a type compatible with container""",
                "defined-type": "container",
                "generated-type": """YANGDynClass(base=inter_instance_policies.inter_instance_policies, is_container='container', yang_name="inter-instance-policies", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
            }
        )

    self.__inter_instance_policies = t
    # Notify pyangbind of the change when the hook is present.
    if hasattr(self, "_set"):
        self._set()

def _unset_inter_instance_policies(self):
    # Reset to a fresh default container, mirroring the __init__ construction.
    self.__inter_instance_policies = YANGDynClass(
        base=inter_instance_policies.inter_instance_policies,
        is_container="container",
        yang_name="inter-instance-policies",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        extensions=None,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="container",
        is_config=True,
    )
def _get_table_connections(self):
    """
    Getter method for table_connections, mapped from YANG variable /network_instances/network_instance/table_connections (container)

    YANG Description: Policies dictating how RIB or FIB entries are propagated
    between tables
    """
    return self.__table_connections

def _set_table_connections(self, v, load=False):
    """
    Setter method for table_connections, mapped from YANG variable /network_instances/network_instance/table_connections (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_table_connections is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_table_connections() directly.

    YANG Description: Policies dictating how RIB or FIB entries are propagated
    between tables
    """
    # Unwrap a value previously wrapped by YANGDynClass before re-validating.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(
            v,
            base=table_connections.table_connections,
            is_container="container",
            yang_name="table-connections",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=True,
        )
    except (TypeError, ValueError):
        # Surface a structured error describing the expected YANG type.
        raise ValueError(
            {
                "error-string": """table_connections must be of a type compatible with container""",
                "defined-type": "container",
                "generated-type": """YANGDynClass(base=table_connections.table_connections, is_container='container', yang_name="table-connections", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
            }
        )

    self.__table_connections = t
    # Notify pyangbind of the change when the hook is present.
    if hasattr(self, "_set"):
        self._set()

def _unset_table_connections(self):
    # Reset to a fresh default container, mirroring the __init__ construction.
    self.__table_connections = YANGDynClass(
        base=table_connections.table_connections,
        is_container="container",
        yang_name="table-connections",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        extensions=None,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="container",
        is_config=True,
    )
def _get_interfaces(self):
    """
    Getter method for interfaces, mapped from YANG variable /network_instances/network_instance/interfaces (container)

    YANG Description: The interfaces that are associated with this network
    instance
    """
    return self.__interfaces

def _set_interfaces(self, v, load=False):
    """
    Setter method for interfaces, mapped from YANG variable /network_instances/network_instance/interfaces (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_interfaces is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_interfaces() directly.

    YANG Description: The interfaces that are associated with this network
    instance
    """
    # Unwrap a value previously wrapped by YANGDynClass before re-validating.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(
            v,
            base=interfaces.interfaces,
            is_container="container",
            yang_name="interfaces",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=True,
        )
    except (TypeError, ValueError):
        # Surface a structured error describing the expected YANG type.
        raise ValueError(
            {
                "error-string": """interfaces must be of a type compatible with container""",
                "defined-type": "container",
                "generated-type": """YANGDynClass(base=interfaces.interfaces, is_container='container', yang_name="interfaces", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
            }
        )

    self.__interfaces = t
    # Notify pyangbind of the change when the hook is present.
    if hasattr(self, "_set"):
        self._set()

def _unset_interfaces(self):
    # Reset to a fresh default container, mirroring the __init__ construction.
    self.__interfaces = YANGDynClass(
        base=interfaces.interfaces,
        is_container="container",
        yang_name="interfaces",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        extensions=None,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="container",
        is_config=True,
    )
def _get_tables(self):
    """
    Getter method for tables, mapped from YANG variable /network_instances/network_instance/tables (container)

    YANG Description: The routing tables that are managed by this network
    instance
    """
    return self.__tables

def _set_tables(self, v, load=False):
    """
    Setter method for tables, mapped from YANG variable /network_instances/network_instance/tables (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_tables is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_tables() directly.

    YANG Description: The routing tables that are managed by this network
    instance
    """
    # Unwrap a value previously wrapped by YANGDynClass before re-validating.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(
            v,
            base=tables.tables,
            is_container="container",
            yang_name="tables",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=True,
        )
    except (TypeError, ValueError):
        # Surface a structured error describing the expected YANG type.
        raise ValueError(
            {
                "error-string": """tables must be of a type compatible with container""",
                "defined-type": "container",
                "generated-type": """YANGDynClass(base=tables.tables, is_container='container', yang_name="tables", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
            }
        )

    self.__tables = t
    # Notify pyangbind of the change when the hook is present.
    if hasattr(self, "_set"):
        self._set()

def _unset_tables(self):
    # Reset to a fresh default container, mirroring the __init__ construction.
    self.__tables = YANGDynClass(
        base=tables.tables,
        is_container="container",
        yang_name="tables",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        extensions=None,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="container",
        is_config=True,
    )
def _get_connection_points(self):
    """
    Getter method for connection_points, mapped from YANG variable /network_instances/network_instance/connection_points (container)

    YANG Description: The set of connection points within a forwarding
    instance
    """
    return self.__connection_points

def _set_connection_points(self, v, load=False):
    """
    Setter method for connection_points, mapped from YANG variable /network_instances/network_instance/connection_points (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_connection_points is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_connection_points() directly.

    YANG Description: The set of connection points within a forwarding
    instance
    """
    # Unwrap a value previously wrapped by YANGDynClass before re-validating.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(
            v,
            base=connection_points.connection_points,
            is_container="container",
            yang_name="connection-points",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=True,
        )
    except (TypeError, ValueError):
        # Surface a structured error describing the expected YANG type.
        raise ValueError(
            {
                "error-string": """connection_points must be of a type compatible with container""",
                "defined-type": "container",
                "generated-type": """YANGDynClass(base=connection_points.connection_points, is_container='container', yang_name="connection-points", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
            }
        )

    self.__connection_points = t
    # Notify pyangbind of the change when the hook is present.
    if hasattr(self, "_set"):
        self._set()

def _unset_connection_points(self):
    # Reset to a fresh default container, mirroring the __init__ construction.
    self.__connection_points = YANGDynClass(
        base=connection_points.connection_points,
        is_container="container",
        yang_name="connection-points",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        extensions=None,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="container",
        is_config=True,
    )
def _get_mpls(self):
    """
    Getter method for mpls, mapped from YANG variable /network_instances/network_instance/mpls (container)

    YANG Description: Anchor point for mpls configuration and operational
    data
    """
    return self.__mpls

def _set_mpls(self, v, load=False):
    """
    Setter method for mpls, mapped from YANG variable /network_instances/network_instance/mpls (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_mpls is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_mpls() directly.

    YANG Description: Anchor point for mpls configuration and operational
    data
    """
    # Unwrap a value previously wrapped by YANGDynClass before re-validating.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(
            v,
            base=mpls.mpls,
            is_container="container",
            yang_name="mpls",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=True,
        )
    except (TypeError, ValueError):
        # Surface a structured error describing the expected YANG type.
        raise ValueError(
            {
                "error-string": """mpls must be of a type compatible with container""",
                "defined-type": "container",
                "generated-type": """YANGDynClass(base=mpls.mpls, is_container='container', yang_name="mpls", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
            }
        )

    self.__mpls = t
    # Notify pyangbind of the change when the hook is present.
    if hasattr(self, "_set"):
        self._set()

def _unset_mpls(self):
    # Reset to a fresh default container, mirroring the __init__ construction.
    self.__mpls = YANGDynClass(
        base=mpls.mpls,
        is_container="container",
        yang_name="mpls",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        extensions=None,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="container",
        is_config=True,
    )
def _get_segment_routing(self):
    """
    Getter method for segment_routing, mapped from YANG variable /network_instances/network_instance/segment_routing (container)

    YANG Description: Configuration and operational state parameters relating to
    segment routing.
    """
    return self.__segment_routing

def _set_segment_routing(self, v, load=False):
    """
    Setter method for segment_routing, mapped from YANG variable /network_instances/network_instance/segment_routing (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_segment_routing is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_segment_routing() directly.

    YANG Description: Configuration and operational state parameters relating to
    segment routing.
    """
    # Unwrap a value previously wrapped by YANGDynClass before re-validating.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(
            v,
            base=segment_routing.segment_routing,
            is_container="container",
            yang_name="segment-routing",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=True,
        )
    except (TypeError, ValueError):
        # Surface a structured error describing the expected YANG type.
        raise ValueError(
            {
                "error-string": """segment_routing must be of a type compatible with container""",
                "defined-type": "container",
                "generated-type": """YANGDynClass(base=segment_routing.segment_routing, is_container='container', yang_name="segment-routing", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
            }
        )

    self.__segment_routing = t
    # Notify pyangbind of the change when the hook is present.
    if hasattr(self, "_set"):
        self._set()

def _unset_segment_routing(self):
    # Reset to a fresh default container, mirroring the __init__ construction.
    self.__segment_routing = YANGDynClass(
        base=segment_routing.segment_routing,
        is_container="container",
        yang_name="segment-routing",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        extensions=None,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="container",
        is_config=True,
    )
def _get_vlans(self):
"""
Getter method for vlans, mapped from YANG variable /network_instances/network_instance/vlans (container)
YANG Description: Container for VLAN configuration and state
variables
"""
return self.__vlans
def _set_vlans(self, v, load=False):
"""
Setter method for vlans, mapped from YANG variable /network_instances/network_instance/vlans (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_vlans is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_vlans() directly.
YANG Description: Container for VLAN configuration and state
variables
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=vlans.vlans,
is_container="container",
yang_name="vlans",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """vlans must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=vlans.vlans, is_container='container', yang_name="vlans", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__vlans = t
if hasattr(self, "_set"):
self._set()
def _unset_vlans(self):
self.__vlans = YANGDynClass(
base=vlans.vlans,
is_container="container",
yang_name="vlans",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
def _get_policy_forwarding(self):
"""
Getter method for policy_forwarding, mapped from YANG variable /network_instances/network_instance/policy_forwarding (container)
YANG Description: Configuration and operational state relating to policy-forwarding within
a network instance.
"""
return self.__policy_forwarding
def _set_policy_forwarding(self, v, load=False):
"""
Setter method for policy_forwarding, mapped from YANG variable /network_instances/network_instance/policy_forwarding (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_policy_forwarding is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_policy_forwarding() directly.
YANG Description: Configuration and operational state relating to policy-forwarding within
a network instance.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=policy_forwarding.policy_forwarding,
is_container="container",
yang_name="policy-forwarding",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """policy_forwarding must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=policy_forwarding.policy_forwarding, is_container='container', yang_name="policy-forwarding", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__policy_forwarding = t
if hasattr(self, "_set"):
self._set()
def _unset_policy_forwarding(self):
self.__policy_forwarding = YANGDynClass(
base=policy_forwarding.policy_forwarding,
is_container="container",
yang_name="policy-forwarding",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
def _get_afts(self):
"""
Getter method for afts, mapped from YANG variable /network_instances/network_instance/afts (container)
YANG Description: The abstract forwarding tables (AFTs) that are associated
with the network instance. An AFT is instantiated per-protocol
running within the network-instance - such that one exists for
IPv4 Unicast, IPv6 Unicast, MPLS, L2 forwarding entries, etc.
A forwarding entry within the FIB has a set of next-hops,
which may be a reference to an entry within another table -
e.g., where a Layer 3 next-hop has an associated Layer 2
forwarding entry.
"""
return self.__afts
def _set_afts(self, v, load=False):
"""
Setter method for afts, mapped from YANG variable /network_instances/network_instance/afts (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_afts is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_afts() directly.
YANG Description: The abstract forwarding tables (AFTs) that are associated
with the network instance. An AFT is instantiated per-protocol
running within the network-instance - such that one exists for
IPv4 Unicast, IPv6 Unicast, MPLS, L2 forwarding entries, etc.
A forwarding entry within the FIB has a set of next-hops,
which may be a reference to an entry within another table -
e.g., where a Layer 3 next-hop has an associated Layer 2
forwarding entry.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=afts.afts,
is_container="container",
yang_name="afts",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """afts must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=afts.afts, is_container='container', yang_name="afts", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__afts = t
if hasattr(self, "_set"):
self._set()
def _unset_afts(self):
self.__afts = YANGDynClass(
base=afts.afts,
is_container="container",
yang_name="afts",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
def _get_protocols(self):
"""
Getter method for protocols, mapped from YANG variable /network_instances/network_instance/protocols (container)
YANG Description: The routing protocols that are enabled for this
network-instance.
"""
return self.__protocols
def _set_protocols(self, v, load=False):
"""
Setter method for protocols, mapped from YANG variable /network_instances/network_instance/protocols (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_protocols is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_protocols() directly.
YANG Description: The routing protocols that are enabled for this
network-instance.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=protocols.protocols,
is_container="container",
yang_name="protocols",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """protocols must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=protocols.protocols, is_container='container', yang_name="protocols", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__protocols = t
if hasattr(self, "_set"):
self._set()
def _unset_protocols(self):
self.__protocols = YANGDynClass(
base=protocols.protocols,
is_container="container",
yang_name="protocols",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
name = __builtin__.property(_get_name, _set_name)
fdb = __builtin__.property(_get_fdb, _set_fdb)
config = __builtin__.property(_get_config, _set_config)
state = __builtin__.property(_get_state, _set_state)
encapsulation = __builtin__.property(_get_encapsulation, _set_encapsulation)
inter_instance_policies = __builtin__.property(
_get_inter_instance_policies, _set_inter_instance_policies
)
table_connections = __builtin__.property(
_get_table_connections, _set_table_connections
)
interfaces = __builtin__.property(_get_interfaces, _set_interfaces)
tables = __builtin__.property(_get_tables, _set_tables)
connection_points = __builtin__.property(
_get_connection_points, _set_connection_points
)
mpls = __builtin__.property(_get_mpls, _set_mpls)
segment_routing = __builtin__.property(_get_segment_routing, _set_segment_routing)
vlans = __builtin__.property(_get_vlans, _set_vlans)
policy_forwarding = __builtin__.property(
_get_policy_forwarding, _set_policy_forwarding
)
afts = __builtin__.property(_get_afts, _set_afts)
protocols = __builtin__.property(_get_protocols, _set_protocols)
_pyangbind_elements = OrderedDict(
[
("name", name),
("fdb", fdb),
("config", config),
("state", state),
("encapsulation", encapsulation),
("inter_instance_policies", inter_instance_policies),
("table_connections", table_connections),
("interfaces", interfaces),
("tables", tables),
("connection_points", connection_points),
("mpls", mpls),
("segment_routing", segment_routing),
("vlans", vlans),
("policy_forwarding", policy_forwarding),
("afts", afts),
("protocols", protocols),
]
)
from . import fdb
from . import config
from . import state
from . import encapsulation
from . import inter_instance_policies
from . import table_connections
from . import interfaces
from . import tables
from . import connection_points
from . import mpls
from . import segment_routing
from . import vlans
from . import policy_forwarding
from . import afts
from . import protocols
class network_instance(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.
    YANG Description: Network instances configured on the local system
    """

    # __slots__ avoids a per-instance __dict__; one slot per YANG child
    # plus the pyangbind path-helper/extension-method hooks.
    __slots__ = (
        "_path_helper",
        "_extmethods",
        "__name",
        "__fdb",
        "__config",
        "__state",
        "__encapsulation",
        "__inter_instance_policies",
        "__table_connections",
        "__interfaces",
        "__tables",
        "__connection_points",
        "__mpls",
        "__segment_routing",
        "__vlans",
        "__policy_forwarding",
        "__afts",
        "__protocols",
    )

    # Name of this node as it appears in the YANG schema tree.
    _yang_name = "network-instance"
    _pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__name = YANGDynClass(
base=six.text_type,
is_leaf=True,
yang_name="name",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
is_keyval=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="leafref",
is_config=True,
)
self.__fdb = YANGDynClass(
base=fdb.fdb,
is_container="container",
yang_name="fdb",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
self.__config = YANGDynClass(
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
self.__encapsulation = YANGDynClass(
base=encapsulation.encapsulation,
is_container="container",
yang_name="encapsulation",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
self.__inter_instance_policies = YANGDynClass(
base=inter_instance_policies.inter_instance_policies,
is_container="container",
yang_name="inter-instance-policies",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
self.__table_connections = YANGDynClass(
base=table_connections.table_connections,
is_container="container",
yang_name="table-connections",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
self.__interfaces = YANGDynClass(
base=interfaces.interfaces,
is_container="container",
yang_name="interfaces",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
self.__tables = YANGDynClass(
base=tables.tables,
is_container="container",
yang_name="tables",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
self.__connection_points = YANGDynClass(
base=connection_points.connection_points,
is_container="container",
yang_name="connection-points",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
self.__mpls = YANGDynClass(
base=mpls.mpls,
is_container="container",
yang_name="mpls",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
self.__segment_routing = YANGDynClass(
base=segment_routing.segment_routing,
is_container="container",
yang_name="segment-routing",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
self.__vlans = YANGDynClass(
base=vlans.vlans,
is_container="container",
yang_name="vlans",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
self.__policy_forwarding = YANGDynClass(
base=policy_forwarding.policy_forwarding,
is_container="container",
yang_name="policy-forwarding",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
self.__afts = YANGDynClass(
base=afts.afts,
is_container="container",
yang_name="afts",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
self.__protocols = YANGDynClass(
base=protocols.protocols,
is_container="container",
yang_name="protocols",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return ["network-instances", "network-instance"]
def _get_name(self):
"""
Getter method for name, mapped from YANG variable /network_instances/network_instance/name (leafref)
YANG Description: A unique name identifying the network instance
"""
return self.__name
def _set_name(self, v, load=False):
"""
Setter method for name, mapped from YANG variable /network_instances/network_instance/name (leafref)
If this variable is read-only (config: false) in the
source YANG file, then _set_name is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_name() directly.
YANG Description: A unique name identifying the network instance
"""
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError(
"Cannot set keys directly when" + " within an instantiated list"
)
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=six.text_type,
is_leaf=True,
yang_name="name",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
is_keyval=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="leafref",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """name must be of a type compatible with leafref""",
"defined-type": "leafref",
"generated-type": """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=True)""",
}
)
self.__name = t
if hasattr(self, "_set"):
self._set()
def _unset_name(self):
self.__name = YANGDynClass(
base=six.text_type,
is_leaf=True,
yang_name="name",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
is_keyval=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="leafref",
is_config=True,
)
def _get_fdb(self):
"""
Getter method for fdb, mapped from YANG variable /network_instances/network_instance/fdb (container)
YANG Description: Operational state and configuration parameters relating to
the forwarding database of the network instance
"""
return self.__fdb
def _set_fdb(self, v, load=False):
"""
Setter method for fdb, mapped from YANG variable /network_instances/network_instance/fdb (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_fdb is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_fdb() directly.
YANG Description: Operational state and configuration parameters relating to
the forwarding database of the network instance
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=fdb.fdb,
is_container="container",
yang_name="fdb",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """fdb must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=fdb.fdb, is_container='container', yang_name="fdb", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__fdb = t
if hasattr(self, "_set"):
self._set()
def _unset_fdb(self):
self.__fdb = YANGDynClass(
base=fdb.fdb,
is_container="container",
yang_name="fdb",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
def _get_config(self):
"""
Getter method for config, mapped from YANG variable /network_instances/network_instance/config (container)
YANG Description: Configuration parameters relating to a network
instance
"""
return self.__config
def _set_config(self, v, load=False):
"""
Setter method for config, mapped from YANG variable /network_instances/network_instance/config (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_config is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config() directly.
YANG Description: Configuration parameters relating to a network
instance
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """config must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__config = t
if hasattr(self, "_set"):
self._set()
def _unset_config(self):
self.__config = YANGDynClass(
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /network_instances/network_instance/state (container)
YANG Description: Operational state parameters relating to a network
instance
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /network_instances/network_instance/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: Operational state parameters relating to a network
instance
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """state must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__state = t
if hasattr(self, "_set"):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
def _get_encapsulation(self):
"""
Getter method for encapsulation, mapped from YANG variable /network_instances/network_instance/encapsulation (container)
YANG Description: Configuration parameters relating to the encapsulation
used for the network instance
"""
return self.__encapsulation
def _set_encapsulation(self, v, load=False):
"""
Setter method for encapsulation, mapped from YANG variable /network_instances/network_instance/encapsulation (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_encapsulation is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_encapsulation() directly.
YANG Description: Configuration parameters relating to the encapsulation
used for the network instance
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=encapsulation.encapsulation,
is_container="container",
yang_name="encapsulation",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """encapsulation must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=encapsulation.encapsulation, is_container='container', yang_name="encapsulation", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__encapsulation = t
if hasattr(self, "_set"):
self._set()
def _unset_encapsulation(self):
self.__encapsulation = YANGDynClass(
base=encapsulation.encapsulation,
is_container="container",
yang_name="encapsulation",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
def _get_inter_instance_policies(self):
"""
Getter method for inter_instance_policies, mapped from YANG variable /network_instances/network_instance/inter_instance_policies (container)
YANG Description: Policies dictating how RIB or FIB entries are imported
to and exported from this instance
"""
return self.__inter_instance_policies
def _set_inter_instance_policies(self, v, load=False):
"""
Setter method for inter_instance_policies, mapped from YANG variable /network_instances/network_instance/inter_instance_policies (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_inter_instance_policies is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_inter_instance_policies() directly.
YANG Description: Policies dictating how RIB or FIB entries are imported
to and exported from this instance
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=inter_instance_policies.inter_instance_policies,
is_container="container",
yang_name="inter-instance-policies",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """inter_instance_policies must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=inter_instance_policies.inter_instance_policies, is_container='container', yang_name="inter-instance-policies", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__inter_instance_policies = t
if hasattr(self, "_set"):
self._set()
def _unset_inter_instance_policies(self):
self.__inter_instance_policies = YANGDynClass(
base=inter_instance_policies.inter_instance_policies,
is_container="container",
yang_name="inter-instance-policies",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
def _get_table_connections(self):
"""
Getter method for table_connections, mapped from YANG variable /network_instances/network_instance/table_connections (container)
YANG Description: Policies dictating how RIB or FIB entries are propagated
between tables
"""
return self.__table_connections
def _set_table_connections(self, v, load=False):
"""
Setter method for table_connections, mapped from YANG variable /network_instances/network_instance/table_connections (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_table_connections is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_table_connections() directly.
YANG Description: Policies dictating how RIB or FIB entries are propagated
between tables
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=table_connections.table_connections,
is_container="container",
yang_name="table-connections",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """table_connections must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=table_connections.table_connections, is_container='container', yang_name="table-connections", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__table_connections = t
if hasattr(self, "_set"):
self._set()
def _unset_table_connections(self):
self.__table_connections = YANGDynClass(
base=table_connections.table_connections,
is_container="container",
yang_name="table-connections",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
def _get_interfaces(self):
"""
Getter method for interfaces, mapped from YANG variable /network_instances/network_instance/interfaces (container)
YANG Description: The interfaces that are associated with this network
instance
"""
return self.__interfaces
def _set_interfaces(self, v, load=False):
"""
Setter method for interfaces, mapped from YANG variable /network_instances/network_instance/interfaces (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_interfaces is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_interfaces() directly.
YANG Description: The interfaces that are associated with this network
instance
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=interfaces.interfaces,
is_container="container",
yang_name="interfaces",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """interfaces must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=interfaces.interfaces, is_container='container', yang_name="interfaces", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__interfaces = t
if hasattr(self, "_set"):
self._set()
def _unset_interfaces(self):
self.__interfaces = YANGDynClass(
base=interfaces.interfaces,
is_container="container",
yang_name="interfaces",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
def _get_tables(self):
"""
Getter method for tables, mapped from YANG variable /network_instances/network_instance/tables (container)
YANG Description: The routing tables that are managed by this network
instance
"""
return self.__tables
def _set_tables(self, v, load=False):
"""
Setter method for tables, mapped from YANG variable /network_instances/network_instance/tables (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_tables is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_tables() directly.
YANG Description: The routing tables that are managed by this network
instance
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=tables.tables,
is_container="container",
yang_name="tables",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """tables must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=tables.tables, is_container='container', yang_name="tables", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__tables = t
if hasattr(self, "_set"):
self._set()
def _unset_tables(self):
self.__tables = YANGDynClass(
base=tables.tables,
is_container="container",
yang_name="tables",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
def _get_connection_points(self):
"""
Getter method for connection_points, mapped from YANG variable /network_instances/network_instance/connection_points (container)
YANG Description: The set of connection points within a forwarding
instance
"""
return self.__connection_points
def _set_connection_points(self, v, load=False):
"""
Setter method for connection_points, mapped from YANG variable /network_instances/network_instance/connection_points (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_connection_points is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_connection_points() directly.
YANG Description: The set of connection points within a forwarding
instance
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=connection_points.connection_points,
is_container="container",
yang_name="connection-points",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """connection_points must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=connection_points.connection_points, is_container='container', yang_name="connection-points", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__connection_points = t
if hasattr(self, "_set"):
self._set()
def _unset_connection_points(self):
self.__connection_points = YANGDynClass(
base=connection_points.connection_points,
is_container="container",
yang_name="connection-points",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
def _get_mpls(self):
"""
Getter method for mpls, mapped from YANG variable /network_instances/network_instance/mpls (container)
YANG Description: Anchor point for mpls configuration and operational
data
"""
return self.__mpls
def _set_mpls(self, v, load=False):
"""
Setter method for mpls, mapped from YANG variable /network_instances/network_instance/mpls (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_mpls is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_mpls() directly.
YANG Description: Anchor point for mpls configuration and operational
data
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=mpls.mpls,
is_container="container",
yang_name="mpls",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """mpls must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=mpls.mpls, is_container='container', yang_name="mpls", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__mpls = t
if hasattr(self, "_set"):
self._set()
def _unset_mpls(self):
self.__mpls = YANGDynClass(
base=mpls.mpls,
is_container="container",
yang_name="mpls",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
def _get_segment_routing(self):
"""
Getter method for segment_routing, mapped from YANG variable /network_instances/network_instance/segment_routing (container)
YANG Description: Configuration and operational state parameters relating to
segment routing.
"""
return self.__segment_routing
def _set_segment_routing(self, v, load=False):
"""
Setter method for segment_routing, mapped from YANG variable /network_instances/network_instance/segment_routing (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_segment_routing is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_segment_routing() directly.
YANG Description: Configuration and operational state parameters relating to
segment routing.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=segment_routing.segment_routing,
is_container="container",
yang_name="segment-routing",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """segment_routing must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=segment_routing.segment_routing, is_container='container', yang_name="segment-routing", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__segment_routing = t
if hasattr(self, "_set"):
self._set()
def _unset_segment_routing(self):
self.__segment_routing = YANGDynClass(
base=segment_routing.segment_routing,
is_container="container",
yang_name="segment-routing",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
def _get_vlans(self):
"""
Getter method for vlans, mapped from YANG variable /network_instances/network_instance/vlans (container)
YANG Description: Container for VLAN configuration and state
variables
"""
return self.__vlans
def _set_vlans(self, v, load=False):
"""
Setter method for vlans, mapped from YANG variable /network_instances/network_instance/vlans (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_vlans is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_vlans() directly.
YANG Description: Container for VLAN configuration and state
variables
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=vlans.vlans,
is_container="container",
yang_name="vlans",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """vlans must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=vlans.vlans, is_container='container', yang_name="vlans", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__vlans = t
if hasattr(self, "_set"):
self._set()
def _unset_vlans(self):
self.__vlans = YANGDynClass(
base=vlans.vlans,
is_container="container",
yang_name="vlans",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
def _get_policy_forwarding(self):
"""
Getter method for policy_forwarding, mapped from YANG variable /network_instances/network_instance/policy_forwarding (container)
YANG Description: Configuration and operational state relating to policy-forwarding within
a network instance.
"""
return self.__policy_forwarding
def _set_policy_forwarding(self, v, load=False):
"""
Setter method for policy_forwarding, mapped from YANG variable /network_instances/network_instance/policy_forwarding (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_policy_forwarding is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_policy_forwarding() directly.
YANG Description: Configuration and operational state relating to policy-forwarding within
a network instance.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=policy_forwarding.policy_forwarding,
is_container="container",
yang_name="policy-forwarding",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """policy_forwarding must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=policy_forwarding.policy_forwarding, is_container='container', yang_name="policy-forwarding", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__policy_forwarding = t
if hasattr(self, "_set"):
self._set()
def _unset_policy_forwarding(self):
self.__policy_forwarding = YANGDynClass(
base=policy_forwarding.policy_forwarding,
is_container="container",
yang_name="policy-forwarding",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
def _get_afts(self):
"""
Getter method for afts, mapped from YANG variable /network_instances/network_instance/afts (container)
YANG Description: The abstract forwarding tables (AFTs) that are associated
with the network instance. An AFT is instantiated per-protocol
running within the network-instance - such that one exists for
IPv4 Unicast, IPv6 Unicast, MPLS, L2 forwarding entries, etc.
A forwarding entry within the FIB has a set of next-hops,
which may be a reference to an entry within another table -
e.g., where a Layer 3 next-hop has an associated Layer 2
forwarding entry.
"""
return self.__afts
def _set_afts(self, v, load=False):
"""
Setter method for afts, mapped from YANG variable /network_instances/network_instance/afts (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_afts is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_afts() directly.
YANG Description: The abstract forwarding tables (AFTs) that are associated
with the network instance. An AFT is instantiated per-protocol
running within the network-instance - such that one exists for
IPv4 Unicast, IPv6 Unicast, MPLS, L2 forwarding entries, etc.
A forwarding entry within the FIB has a set of next-hops,
which may be a reference to an entry within another table -
e.g., where a Layer 3 next-hop has an associated Layer 2
forwarding entry.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=afts.afts,
is_container="container",
yang_name="afts",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """afts must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=afts.afts, is_container='container', yang_name="afts", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__afts = t
if hasattr(self, "_set"):
self._set()
def _unset_afts(self):
self.__afts = YANGDynClass(
base=afts.afts,
is_container="container",
yang_name="afts",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
def _get_protocols(self):
"""
Getter method for protocols, mapped from YANG variable /network_instances/network_instance/protocols (container)
YANG Description: The routing protocols that are enabled for this
network-instance.
"""
return self.__protocols
def _set_protocols(self, v, load=False):
"""
Setter method for protocols, mapped from YANG variable /network_instances/network_instance/protocols (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_protocols is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_protocols() directly.
YANG Description: The routing protocols that are enabled for this
network-instance.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=protocols.protocols,
is_container="container",
yang_name="protocols",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """protocols must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=protocols.protocols, is_container='container', yang_name="protocols", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__protocols = t
if hasattr(self, "_set"):
self._set()
def _unset_protocols(self):
self.__protocols = YANGDynClass(
base=protocols.protocols,
is_container="container",
yang_name="protocols",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
name = __builtin__.property(_get_name, _set_name)
fdb = __builtin__.property(_get_fdb, _set_fdb)
config = __builtin__.property(_get_config, _set_config)
state = __builtin__.property(_get_state, _set_state)
encapsulation = __builtin__.property(_get_encapsulation, _set_encapsulation)
inter_instance_policies = __builtin__.property(
_get_inter_instance_policies, _set_inter_instance_policies
)
table_connections = __builtin__.property(
_get_table_connections, _set_table_connections
)
interfaces = __builtin__.property(_get_interfaces, _set_interfaces)
tables = __builtin__.property(_get_tables, _set_tables)
connection_points = __builtin__.property(
_get_connection_points, _set_connection_points
)
mpls = __builtin__.property(_get_mpls, _set_mpls)
segment_routing = __builtin__.property(_get_segment_routing, _set_segment_routing)
vlans = __builtin__.property(_get_vlans, _set_vlans)
policy_forwarding = __builtin__.property(
_get_policy_forwarding, _set_policy_forwarding
)
afts = __builtin__.property(_get_afts, _set_afts)
protocols = __builtin__.property(_get_protocols, _set_protocols)
_pyangbind_elements = OrderedDict(
[
("name", name),
("fdb", fdb),
("config", config),
("state", state),
("encapsulation", encapsulation),
("inter_instance_policies", inter_instance_policies),
("table_connections", table_connections),
("interfaces", interfaces),
("tables", tables),
("connection_points", connection_points),
("mpls", mpls),
("segment_routing", segment_routing),
("vlans", vlans),
("policy_forwarding", policy_forwarding),
("afts", afts),
("protocols", protocols),
]
)
| 40.376891
| 428
| 0.610632
| 11,972
| 117,416
| 5.748079
| 0.021049
| 0.081522
| 0.052488
| 0.058562
| 0.994551
| 0.991397
| 0.991397
| 0.991397
| 0.991397
| 0.991397
| 0
| 0.000461
| 0.297966
| 117,416
| 2,907
| 429
| 40.390781
| 0.834381
| 0.20332
| 0
| 0.896364
| 0
| 0.014545
| 0.269382
| 0.100392
| 0
| 0
| 0
| 0
| 0
| 1
| 0.045455
| false
| 0
| 0.020455
| 0
| 0.101364
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3940dab78dcf0eef1772d62ae19d5dc3bf9fff62
| 2,874
|
py
|
Python
|
py/average_flpth_predictions.py
|
MarconiS/hiPyRneon
|
e52245e3a71716fe09d8cb3833a4458210dff8cf
|
[
"MIT"
] | 1
|
2021-02-11T18:22:36.000Z
|
2021-02-11T18:22:36.000Z
|
py/average_flpth_predictions.py
|
MarconiS/hiPyRneon
|
e52245e3a71716fe09d8cb3833a4458210dff8cf
|
[
"MIT"
] | null | null | null |
py/average_flpth_predictions.py
|
MarconiS/hiPyRneon
|
e52245e3a71716fe09d8cb3833a4458210dff8cf
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Jul 13 23:45:13 2020
@author: sergiomarconi
"""
import rasterio
import geopandas as gpd
import pandas as pd
import rasterstats
import numpy as np
from rasterstats import zonal_stats
from rasterio.plot import show
import os
tr_pt = "/Users/sergiomarconi/Dropbox (UFL)/Traits_Maps/TALL/ITCs/eval/"
arr = os.listdir(tr_pt)
gpd1 = gpd.read_file("/Users/sergiomarconi/Documents/Data/TOH/OSBS_N.shp")
# we'll make a string column for the wKT geom
gpd1['WKT'] = gpd1['geometry'].apply(lambda x: str(x))
grouped_gdf = gpd1.groupby('WKT').mean().reset_index()
grouped_gdf.to_csv("/Users/sergiomarconi/Documents/Data/TOH/csv/OSBS_N.csv")
gpd1 = gpd.read_file("/Users/sergiomarconi/Documents/Data/TOH/OSBS_C.shp")
# we'll make a string column for the wKT geom
gpd1['WKT'] = gpd1['geometry'].apply(lambda x: str(x))
grouped_gdf = gpd1.groupby('WKT').mean().reset_index()
grouped_gdf.to_csv("/Users/sergiomarconi/Documents/Data/TOH/csv/OSBS_C.csv")
gpd1 = gpd.read_file("/Users/sergiomarconi/Documents/Data/TOH/OSBS_P.shp")
# we'll make a string column for the wKT geom
gpd1['WKT'] = gpd1['geometry'].apply(lambda x: str(x))
grouped_gdf = gpd1.groupby('WKT').mean().reset_index()
grouped_gdf.to_csv("/Users/sergiomarconi/Documents/Data/TOH/csv/OSBS_P.csv")
gpd1 = gpd.read_file("/Users/sergiomarconi/Documents/Data/TOH/OSBS_LMA.shp")
# we'll make a string column for the wKT geom
gpd1['WKT'] = gpd1['geometry'].apply(lambda x: str(x))
grouped_gdf = gpd1.groupby('WKT').mean().reset_index()
grouped_gdf.to_csv("/Users/sergiomarconi/Documents/Data/TOH/csv/OSBS_LMA.csv")
gpd1 = gpd.read_file("/Users/sergiomarconi/Documents/Data/TOH/TALL_N.shp")
# we'll make a string column for the wKT geom
gpd1['WKT'] = gpd1['geometry'].apply(lambda x: str(x))
grouped_gdf = gpd1.groupby('WKT').mean().reset_index()
grouped_gdf.to_csv("/Users/sergiomarconi/Documents/Data/TOH/csv/TALL_N.csv")
gpd1 = gpd.read_file("/Users/sergiomarconi/Documents/Data/TOH/TALL_C.shp")
# we'll make a string column for the wKT geom
gpd1['WKT'] = gpd1['geometry'].apply(lambda x: str(x))
grouped_gdf = gpd1.groupby('WKT').mean().reset_index()
grouped_gdf.to_csv("/Users/sergiomarconi/Documents/Data/TOH/csv/TALL_C.csv")
gpd1 = gpd.read_file("/Users/sergiomarconi/Documents/Data/TOH/TALL_P.shp")
# we'll make a string column for the wKT geom
gpd1['WKT'] = gpd1['geometry'].apply(lambda x: str(x))
grouped_gdf = gpd1.groupby('WKT').mean().reset_index()
grouped_gdf.to_csv("/Users/sergiomarconi/Documents/Data/TOH/csv/TALL_P.csv")
gpd1 = gpd.read_file("/Users/sergiomarconi/Documents/Data/TOH/TALL_LMA.shp")
# we'll make a string column for the wKT geom
gpd1['WKT'] = gpd1['geometry'].apply(lambda x: str(x))
grouped_gdf = gpd1.groupby('WKT').mean().reset_index()
grouped_gdf.to_csv("/Users/sergiomarconi/Documents/Data/TOH/csv/TALL_LMA.csv")
| 39.369863
| 78
| 0.747738
| 478
| 2,874
| 4.370293
| 0.167364
| 0.146482
| 0.206798
| 0.237434
| 0.857348
| 0.857348
| 0.857348
| 0.857348
| 0.857348
| 0.857348
| 0
| 0.017484
| 0.084551
| 2,874
| 73
| 79
| 39.369863
| 0.776511
| 0.158316
| 0
| 0.380952
| 0
| 0
| 0.421973
| 0.374948
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.190476
| 0
| 0.190476
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1a34e8f04735da20b2d564329f0ca33519c082d6
| 3,684
|
py
|
Python
|
serial_scripts/perf/dpdk/test_dpdk_perf.py
|
vkolli/contrail-test-perf
|
db04b8924a2c330baabe3059788b149d957a7d67
|
[
"Apache-2.0"
] | 1
|
2017-06-13T04:42:34.000Z
|
2017-06-13T04:42:34.000Z
|
serial_scripts/perf/dpdk/test_dpdk_perf.py
|
vkolli/contrail-test-perf
|
db04b8924a2c330baabe3059788b149d957a7d67
|
[
"Apache-2.0"
] | null | null | null |
serial_scripts/perf/dpdk/test_dpdk_perf.py
|
vkolli/contrail-test-perf
|
db04b8924a2c330baabe3059788b149d957a7d67
|
[
"Apache-2.0"
] | null | null | null |
from base import PerfBaseDpdk
import time
from tcutils.wrappers import preposttest_wrapper
import test
class PerfDpdkTest(PerfBaseDpdk):
@classmethod
def setUpClass(cls):
super(PerfDpdkTest, cls).setUpClass()
# @preposttest_wrapper
# def test_perfdpdk_tcp_vm_to_vm_differ_compute(self):
# return self.run_perf_tests('THROUGHPUT','different','TCP','v4')
# @preposttest_wrapper
# def test_perfdpdk_tcp_vm_to_vm_same_compute(self):
# return self.run_perf_tests('THROUGHPUT','same','TCP','v4')
# @preposttest_wrapper
# def test_ixia_perfdpdk_tcp_vm_to_vm_compute_2_1si(self):
# return self.run_ixia_perf_tests('THROUGHPUT','different','TCP','v4',2,1)
@preposttest_wrapper
def test_ixia_perfdpdk_tcp_vm_to_vm_compute_4_1si(self):
return self.run_ixia_perf_tests('THROUGHPUT','different','TCP','v4',4,1)
@preposttest_wrapper
def test_ixia_perfdpdk_tcp_vm_to_vm_compute_8_1si(self):
return self.run_ixia_perf_tests('THROUGHPUT','different','TCP','v4',8,1)
# @preposttest_wrapper
# def test_ixia_perfdpdk_tcp_vm_to_vm_compute_2_2si(self):
# return self.run_ixia_perf_tests('THROUGHPUT','different','TCP','v4',2,2)
@preposttest_wrapper
def test_ixia_perfdpdk_tcp_vm_to_vm_compute_4_2si(self):
return self.run_ixia_perf_tests('THROUGHPUT','different','TCP','v4',4,2)
# @preposttest_wrapper
# def test_ixia_perfdpdk_tcp_vm_to_vm_compute_2_4si(self):
# return self.run_ixia_perf_tests('THROUGHPUT','different','TCP','v4',2,4)
"""
@preposttest_wrapper
def test_perfdpdk_udp_vm_to_vm_differ_compute(self):
return self.run_perf_tests('THROUGHPUT','different','UDP','v4')
@preposttest_wrapper
def test_perfdpdk_udp_vm_to_vm_same_compute(self):
return self.run_perf_tests('THROUGHPUT','same','UDP','v4')
@preposttest_wrapper
def test_latdpdk_tcp_vm_to_vm_differ_compute(self):
return self.run_perf_tests('LATENCY','different','TCP','v4')
@preposttest_wrapper
def test_latdpdk_udp_vm_to_vm_differ_compute(self):
return self.run_perf_tests('LATENCY','different','UDP','v4')
@preposttest_wrapper
def test_latdpdk_tcp_vm_to_vm_same_compute(self):
return self.run_perf_tests('LATENCY','same','TCP','v4')
@preposttest_wrapper
def test_latdpdk_udp_vm_to_vm_same_compute(self):
return self.run_perf_tests('LATENCY','same','UDP','v4')
@preposttest_wrapper
def test_perf6dpdk_tcp_vm_to_vm_differ_compute(self):
return self.run_perf_tests('THROUGHPUT','different','TCP','v6')
@preposttest_wrapper
def test_perf6dpdk_udp_vm_to_vm_differ_compute(self):
return self.run_perf_tests('THROUGHPUT','different','UDP','v6')
@preposttest_wrapper
def test_perf6dpdk_tcp_vm_to_vm_same_compute(self):
return self.run_perf_tests('THROUGHPUT','same','TCP','v6')
@preposttest_wrapper
def test_perf6dpdk_udp_vm_to_vm_same_compute(self):
return self.run_perf_tests('THROUGHPUT','same','UDP','v6')
@preposttest_wrapper
def test_lat6dpdk_tcp_vm_to_vm_differ_compute(self):
return self.run_perf_tests('LATENCY','different','TCP','v6')
@preposttest_wrapper
def test_lat6dpdk_udp_vm_to_vm_differ_compute(self):
return self.run_perf_tests('LATENCY','different','UDP','v6')
@preposttest_wrapper
def test_lat6dpdk_tcp_vm_to_vm_same_compute(self):
return self.run_perf_tests('LATENCY','same','TCP','v6')
@preposttest_wrapper
def test_lat6dpdk_udp_vm_to_vm_same_compute(self):
return self.run_perf_tests('LATENCY','same','UDP','v6')
"""
#end PerfDpdkTest
| 33.798165
| 81
| 0.734799
| 519
| 3,684
| 4.755299
| 0.090559
| 0.167747
| 0.187196
| 0.222853
| 0.919773
| 0.919773
| 0.919773
| 0.915721
| 0.915721
| 0.834684
| 0
| 0.017137
| 0.14468
| 3,684
| 108
| 82
| 34.111111
| 0.766106
| 0.216884
| 0
| 0.176471
| 0
| 0
| 0.095238
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.235294
| false
| 0
| 0.235294
| 0.176471
| 0.705882
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 11
|
1a3bec87c370ece78d6381a1fe247955d34720b5
| 32,128
|
py
|
Python
|
tests/test_tools.py
|
ondiekisteven/pyiso8583
|
b90b8b3c0e141baf81ffb658ddc782bad66dde73
|
[
"MIT"
] | 17
|
2020-09-10T08:24:58.000Z
|
2022-03-08T01:33:32.000Z
|
tests/test_tools.py
|
ondiekisteven/pyiso8583
|
b90b8b3c0e141baf81ffb658ddc782bad66dde73
|
[
"MIT"
] | 13
|
2020-11-21T01:46:14.000Z
|
2022-02-14T06:20:32.000Z
|
tests/test_tools.py
|
ondiekisteven/pyiso8583
|
b90b8b3c0e141baf81ffb658ddc782bad66dde73
|
[
"MIT"
] | 7
|
2020-10-09T20:25:56.000Z
|
2022-02-18T13:54:34.000Z
|
import copy
from io import StringIO
import iso8583
import iso8583.specs
import pytest
from iso8583.tools import _wrap_bytes_repr, _wrap_str_repr
spec = copy.deepcopy(iso8583.specs.default)
def test_pp(capsys):
    """pp prints decoded and encoded documents to stdout by default."""
    # fmt: off
    spec["h"]["max_len"] = 6
    spec["h"]["len_type"] = 0

    # An empty document prints nothing at all.
    doc_dec = {}
    iso8583.pp(doc_dec, spec)
    assert capsys.readouterr().out.split("\n") == [""]

    doc_dec.update({
        "h": "header",
        "t": "0200",
        "2": "12345678",
        "44": "123",
        "123": "123",
    })
    _, doc_enc = iso8583.encode(doc_dec, spec)

    # Decoded document: values printed as str repr.
    iso8583.pp(doc_dec, spec)
    assert capsys.readouterr().out.split("\n") == [
        "h Message Header : 'header'",
        "t Message Type : '0200'",
        "p Bitmap, Primary : 'C000000000100000'",
        "1 Bitmap, Secondary : '0000000000000020'",
        "2 Primary Account Number (PAN) : '12345678'",
        "44 Additional Response Data : '123'",
        "123 Reserved for Private Use : '123'",
        "",
    ]

    # Encoded document: values printed as bytes repr with length prefixes.
    iso8583.pp(doc_enc, spec)
    assert capsys.readouterr().out.split("\n") == [
        "h Message Header : b'header'",
        "t Message Type : b'0200'",
        "p Bitmap, Primary : b'\\xc0\\x00\\x00\\x00\\x00\\x10\\x00\\x00'",
        "1 Bitmap, Secondary : b'\\x00\\x00\\x00\\x00\\x00\\x00\\x00 '",
        "2 Primary Account Number (PAN) : b'08' b'12345678'",
        "44 Additional Response Data : b'03' b'123'",
        "123 Reserved for Private Use : b'003' b'123'",
        "",
    ]
    # fmt: on
def test_pp_variable_header(capsys):
    """pp renders a variable-length header together with its length prefix."""
    # fmt: off
    spec["h"]["max_len"] = 6
    spec["h"]["len_type"] = 2

    doc_dec = {
        "h": "header",
        "t": "0200",
        "2": "12345678",
        "44": "123",
        "123": "123",
    }
    _, doc_enc = iso8583.encode(doc_dec, spec)

    iso8583.pp(doc_dec, spec)
    assert capsys.readouterr().out.split("\n") == [
        "h Message Header : 'header'",
        "t Message Type : '0200'",
        "p Bitmap, Primary : 'C000000000100000'",
        "1 Bitmap, Secondary : '0000000000000020'",
        "2 Primary Account Number (PAN) : '12345678'",
        "44 Additional Response Data : '123'",
        "123 Reserved for Private Use : '123'",
        "",
    ]

    # Encoded header carries its 2-byte length prefix.
    iso8583.pp(doc_enc, spec)
    assert capsys.readouterr().out.split("\n") == [
        "h Message Header : b'06' b'header'",
        "t Message Type : b'0200'",
        "p Bitmap, Primary : b'\\xc0\\x00\\x00\\x00\\x00\\x10\\x00\\x00'",
        "1 Bitmap, Secondary : b'\\x00\\x00\\x00\\x00\\x00\\x00\\x00 '",
        "2 Primary Account Number (PAN) : b'08' b'12345678'",
        "44 Additional Response Data : b'03' b'123'",
        "123 Reserved for Private Use : b'003' b'123'",
        "",
    ]

    # An empty header is still printed, with a zero length prefix when encoded.
    doc_dec["h"] = ""
    _, doc_enc = iso8583.encode(doc_dec, spec)

    iso8583.pp(doc_dec, spec)
    assert capsys.readouterr().out.split("\n") == [
        "h Message Header : ''",
        "t Message Type : '0200'",
        "p Bitmap, Primary : 'C000000000100000'",
        "1 Bitmap, Secondary : '0000000000000020'",
        "2 Primary Account Number (PAN) : '12345678'",
        "44 Additional Response Data : '123'",
        "123 Reserved for Private Use : '123'",
        "",
    ]

    iso8583.pp(doc_enc, spec)
    assert capsys.readouterr().out.split("\n") == [
        "h Message Header : b'00' b''",
        "t Message Type : b'0200'",
        "p Bitmap, Primary : b'\\xc0\\x00\\x00\\x00\\x00\\x10\\x00\\x00'",
        "1 Bitmap, Secondary : b'\\x00\\x00\\x00\\x00\\x00\\x00\\x00 '",
        "2 Primary Account Number (PAN) : b'08' b'12345678'",
        "44 Additional Response Data : b'03' b'123'",
        "123 Reserved for Private Use : b'003' b'123'",
        "",
    ]
    # fmt: on
def test_pp_stream():
    """pp writes to a caller-supplied stream instead of stdout."""
    # fmt: off
    spec["h"]["max_len"] = 6
    spec["h"]["len_type"] = 0

    doc_dec = {
        "h": "header",
        "t": "0200",
        "2": "12345678",
        "44": "123",
        "123": "123",
    }
    _, doc_enc = iso8583.encode(doc_dec, spec)

    buf = StringIO()
    iso8583.pp(doc_dec, spec, stream=buf)
    assert buf.getvalue().split("\n") == [
        "h Message Header : 'header'",
        "t Message Type : '0200'",
        "p Bitmap, Primary : 'C000000000100000'",
        "1 Bitmap, Secondary : '0000000000000020'",
        "2 Primary Account Number (PAN) : '12345678'",
        "44 Additional Response Data : '123'",
        "123 Reserved for Private Use : '123'",
        "",
    ]

    buf = StringIO()
    iso8583.pp(doc_enc, spec, stream=buf)
    assert buf.getvalue().split("\n") == [
        "h Message Header : b'header'",
        "t Message Type : b'0200'",
        "p Bitmap, Primary : b'\\xc0\\x00\\x00\\x00\\x00\\x10\\x00\\x00'",
        "1 Bitmap, Secondary : b'\\x00\\x00\\x00\\x00\\x00\\x00\\x00 '",
        "2 Primary Account Number (PAN) : b'08' b'12345678'",
        "44 Additional Response Data : b'03' b'123'",
        "123 Reserved for Private Use : b'003' b'123'",
        "",
    ]
    # fmt: on
def test_pp_optional_fields():
    """Only the keys actually present in the document are printed."""
    # fmt: off
    spec["h"]["max_len"] = 6
    spec["h"]["len_type"] = 0

    # Empty document
    buf = StringIO()
    iso8583.pp({}, spec, stream=buf)
    assert buf.getvalue().split("\n") == [""]

    # Header only
    buf = StringIO()
    iso8583.pp({"h": "header"}, spec, stream=buf)
    assert buf.getvalue().split("\n") == [
        "h Message Header : 'header'",
        "",
    ]

    # Header and type
    buf = StringIO()
    iso8583.pp({"h": "header", "t": "0200"}, spec, stream=buf)
    assert buf.getvalue().split("\n") == [
        "h Message Header : 'header'",
        "t Message Type : '0200'",
        "",
    ]

    # Header, type and field 2 — no bitmaps printed since nothing was encoded
    buf = StringIO()
    iso8583.pp({"h": "header", "t": "0200", "2": "12345678"}, spec, stream=buf)
    assert buf.getvalue().split("\n") == [
        "h Message Header : 'header'",
        "t Message Type : '0200'",
        "2 Primary Account Number (PAN) : '12345678'",
        "",
    ]

    # Header, type and field 123; encoding adds the bitmap keys to doc_dec
    doc_dec = {"h": "header", "t": "0200", "123": "123"}
    _, doc_enc = iso8583.encode(doc_dec, spec)
    buf = StringIO()
    iso8583.pp(doc_dec, spec, stream=buf)
    assert buf.getvalue().split("\n") == [
        "h Message Header : 'header'",
        "t Message Type : '0200'",
        "p Bitmap, Primary : '8000000000000000'",
        "1 Bitmap, Secondary : '0000000000000020'",
        "123 Reserved for Private Use : '123'",
        "",
    ]

    buf = StringIO()
    iso8583.pp(doc_enc, spec, stream=buf)
    assert buf.getvalue().split("\n") == [
        "h Message Header : b'header'",
        "t Message Type : b'0200'",
        "p Bitmap, Primary : b'\\x80\\x00\\x00\\x00\\x00\\x00\\x00\\x00'",
        "1 Bitmap, Secondary : b'\\x00\\x00\\x00\\x00\\x00\\x00\\x00 '",
        "123 Reserved for Private Use : b'003' b'123'",
        "",
    ]
    # fmt: on
def test_pp_header_present_but_not_in_spec():
    """A header key is skipped when the spec defines a zero-length header."""
    # fmt: off
    spec["h"]["max_len"] = 0
    spec["h"]["len_type"] = 0

    # Empty document
    buf = StringIO()
    iso8583.pp({}, spec, stream=buf)
    assert buf.getvalue().split("\n") == [""]

    # Header only: not printed because the spec allows no header
    buf = StringIO()
    iso8583.pp({"h": "header"}, spec, stream=buf)
    assert buf.getvalue().split("\n") == [""]

    # Header and type: only the type is printed
    buf = StringIO()
    iso8583.pp({"h": "header", "t": "0200"}, spec, stream=buf)
    assert buf.getvalue().split("\n") == [
        "t Message Type : '0200'",
        "",
    ]
    # fmt: on
def test_pp_no_desc():
    """desc_width=0 prints only the padded field name, no description."""
    # fmt: off
    spec["h"]["max_len"] = 0
    spec["h"]["len_type"] = 0

    doc_dec = {
        "t": "0200",
        "2": "12345678",
        "44": "123",
        "123": "123",
    }
    _, doc_enc = iso8583.encode(doc_dec, spec)

    buf = StringIO()
    iso8583.pp(doc_dec, spec, 0, stream=buf)
    assert buf.getvalue().split("\n") == [
        "t : '0200'",
        "p : 'C000000000100000'",
        "1 : '0000000000000020'",
        "2 : '12345678'",
        "44 : '123'",
        "123: '123'",
        "",
    ]

    buf = StringIO()
    iso8583.pp(doc_enc, spec, 0, stream=buf)
    assert buf.getvalue().split("\n") == [
        "t : b'0200'",
        "p : b'\\xc0\\x00\\x00\\x00\\x00\\x10\\x00\\x00'",
        "1 : b'\\x00\\x00\\x00\\x00\\x00\\x00\\x00 '",
        "2 : b'08' b'12345678'",
        "44 : b'03' b'123'",
        "123: b'003' b'123'",
        "",
    ]
    # fmt: on
def test_pp_folding():
    """Values longer than line_width are folded onto continuation lines."""
    # fmt: off
    spec["h"]["max_len"] = 0
    spec["h"]["len_type"] = 0

    doc_dec = {
        "t": "0200",
        "123": "123456789012345678901234567890123456789012345678901234567890",
    }
    _, doc_enc = iso8583.encode(doc_dec, spec)

    # Standard width = 80
    buf = StringIO()
    iso8583.pp(doc_dec, spec, stream=buf, line_width=80)
    assert buf.getvalue().split("\n") == [
        "t Message Type : '0200'",
        "p Bitmap, Primary : '8000000000000000'",
        "1 Bitmap, Secondary : '0000000000000020'",
        "123 Reserved for Private Use : '123456789012345678901234567890123456789012'",
        " '345678901234567890'",
        "",
    ]

    buf = StringIO()
    iso8583.pp(doc_enc, spec, stream=buf, line_width=80)
    assert buf.getvalue().split("\n") == [
        "t Message Type : b'0200'",
        "p Bitmap, Primary : b'\\x80\\x00\\x00\\x00\\x00\\x00\\x00\\x00'",
        "1 Bitmap, Secondary : b'\\x00\\x00\\x00\\x00\\x00\\x00\\x00 '",
        "123 Reserved for Private Use : b'060' b'1234567890123456789012345678901234'",
        " b'56789012345678901234567890'",
        "",
    ]

    # Reduced width = 60 (bitmaps fold too in the encoded form)
    buf = StringIO()
    iso8583.pp(doc_dec, spec, stream=buf, line_width=60)
    assert buf.getvalue().split("\n") == [
        "t Message Type : '0200'",
        "p Bitmap, Primary : '8000000000000000'",
        "1 Bitmap, Secondary : '0000000000000020'",
        "123 Reserved for Private Use : '1234567890123456789012'",
        " '3456789012345678901234'",
        " '5678901234567890'",
        "",
    ]

    buf = StringIO()
    iso8583.pp(doc_enc, spec, stream=buf, line_width=60)
    assert buf.getvalue().split("\n") == [
        "t Message Type : b'0200'",
        "p Bitmap, Primary : b'\\x80\\x00\\x00\\x00\\x00'",
        " b'\\x00\\x00\\x00'",
        "1 Bitmap, Secondary : b'\\x00\\x00\\x00\\x00\\x00'",
        " b'\\x00\\x00 '",
        "123 Reserved for Private Use : b'060' b'12345678901234'",
        " b'56789012345678'",
        " b'90123456789012'",
        " b'34567890123456'",
        " b'7890'",
        "",
    ]

    # Widths that split the data into even chunks
    buf = StringIO()
    iso8583.pp(doc_dec, spec, stream=buf, line_width=68)
    assert buf.getvalue().split("\n") == [
        "t Message Type : '0200'",
        "p Bitmap, Primary : '8000000000000000'",
        "1 Bitmap, Secondary : '0000000000000020'",
        "123 Reserved for Private Use : '123456789012345678901234567890'",
        " '123456789012345678901234567890'",
        "",
    ]

    buf = StringIO()
    iso8583.pp(doc_enc, spec, stream=buf, line_width=61)
    assert buf.getvalue().split("\n") == [
        "t Message Type : b'0200'",
        "p Bitmap, Primary : b'\\x80\\x00\\x00\\x00\\x00'",
        " b'\\x00\\x00\\x00'",
        "1 Bitmap, Secondary : b'\\x00\\x00\\x00\\x00\\x00'",
        " b'\\x00\\x00 '",
        "123 Reserved for Private Use : b'060' b'123456789012345'",
        " b'678901234567890'",
        " b'123456789012345'",
        " b'678901234567890'",
        "",
    ]

    # Scenario where the bare "b''" wrapper itself triggers a fold
    doc_dec = {"t": "0200", "123": "12"}
    _, doc_enc = iso8583.encode(doc_dec, spec)
    buf = StringIO()
    iso8583.pp(doc_enc, spec, stream=buf, line_width=44)
    assert buf.getvalue().split("\n") == (
        ["t Message Type : b'0200'",
         "p Bitmap, Primary : b'\\x80'"]
        + [" b'\\x00'"] * 7
        + ["1 Bitmap, Secondary : b'\\x00'"]
        + [" b'\\x00'"] * 5
        + [" b'\\x00 '",
           "123 Reserved for Private Use : b'002' b'1'",
           " b'2'",
           ""]
    )

    # Scenario where the bare "''" wrapper triggers a fold
    buf = StringIO()
    iso8583.pp(doc_dec, spec, stream=buf, line_width=37)
    assert buf.getvalue().split("\n") == (
        ["t Message Type : '0'",
         " '2'",
         " '0'",
         " '0'",
         "p Bitmap, Primary : '8'"]
        + [" '0'"] * 15
        + ["1 Bitmap, Secondary : '0'"]
        + [" '0'"] * 13
        + [" '2'",
           " '0'",
           "123 Reserved for Private Use : '1'",
           " '2'",
           ""]
    )

    # _pprint_bytes does not try to fold when the data length is <= 1
    doc_dec = {"t": "0200", "123": ""}
    _, doc_enc = iso8583.encode(doc_dec, spec)
    buf = StringIO()
    iso8583.pp(doc_enc, spec, stream=buf, line_width=44)
    assert buf.getvalue().split("\n") == (
        ["t Message Type : b'0200'",
         "p Bitmap, Primary : b'\\x80'"]
        + [" b'\\x00'"] * 7
        + ["1 Bitmap, Secondary : b'\\x00'"]
        + [" b'\\x00'"] * 5
        + [" b'\\x00 '",
           "123 Reserved for Private Use : b'000' b''",
           ""]
    )

    # _pprint_str does not try to fold when the data length is <= 1
    buf = StringIO()
    iso8583.pp(doc_dec, spec, stream=buf, line_width=37)
    assert buf.getvalue().split("\n") == (
        ["t Message Type : '0'",
         " '2'",
         " '0'",
         " '0'",
         "p Bitmap, Primary : '8'"]
        + [" '0'"] * 15
        + ["1 Bitmap, Secondary : '0'"]
        + [" '0'"] * 13
        + [" '2'",
           " '0'",
           "123 Reserved for Private Use : ''",
           ""]
    )

    # Same no-fold behavior with exactly one character of data
    doc_dec = {"t": "0200", "123": "1"}
    _, doc_enc = iso8583.encode(doc_dec, spec)
    buf = StringIO()
    iso8583.pp(doc_enc, spec, stream=buf, line_width=44)
    assert buf.getvalue().split("\n") == (
        ["t Message Type : b'0200'",
         "p Bitmap, Primary : b'\\x80'"]
        + [" b'\\x00'"] * 7
        + ["1 Bitmap, Secondary : b'\\x00'"]
        + [" b'\\x00'"] * 5
        + [" b'\\x00 '",
           "123 Reserved for Private Use : b'001' b'1'",
           ""]
    )

    buf = StringIO()
    iso8583.pp(doc_dec, spec, stream=buf, line_width=37)
    assert buf.getvalue().split("\n") == (
        ["t Message Type : '0'",
         " '2'",
         " '0'",
         " '0'",
         "p Bitmap, Primary : '8'"]
        + [" '0'"] * 15
        + ["1 Bitmap, Secondary : '0'"]
        + [" '0'"] * 13
        + [" '2'",
           " '0'",
           "123 Reserved for Private Use : '1'",
           ""]
    )

    # Negative line parameters behave like minimal usable widths
    doc_dec = {"t": "0200", "123": "1"}
    _, doc_enc = iso8583.encode(doc_dec, spec)
    buf = StringIO()
    iso8583.pp(doc_enc, spec, stream=buf, desc_width=-99, line_width=-99)
    assert buf.getvalue().split("\n") == (
        ["t : b'0'",
         " b'2'",
         " b'0'",
         " b'0'",
         "p : b'\\x80'"]
        + [" b'\\x00'"] * 7
        + ["1 : b'\\x00'"]
        + [" b'\\x00'"] * 6
        + [" b' '",
           "123: b'001' b'1'",
           ""]
    )

    buf = StringIO()
    iso8583.pp(doc_dec, spec, stream=buf, desc_width=-99, line_width=-99)
    assert buf.getvalue().split("\n") == (
        ["t : '0'",
         " '2'",
         " '0'",
         " '0'",
         "p : '8'"]
        + [" '0'"] * 15
        + ["1 : '0'"]
        + [" '0'"] * 13
        + [" '2'",
           " '0'",
           "123: '1'",
           ""]
    )
    # fmt: on
def test_pp_invalid_types():
    """Values of unexpected types are simply printed via repr().

    Encoded entries that lack the required 'len'/'data' keys print only
    whatever is available (or an empty bytes literal).
    """
    # fmt: off
    spec["h"]["max_len"] = 0
    spec["h"]["len_type"] = 0

    doc_dec = {"t": [1, 2, 3, 4], "123": {1, 2, 3}}

    # Standard width = 80
    buf = StringIO()
    iso8583.pp(doc_dec, spec, stream=buf, line_width=80)
    assert buf.getvalue().split("\n") == [
        "t Message Type : [1, 2, 3, 4]",
        "123 Reserved for Private Use : {1, 2, 3}",
        "",
    ]

    # Trigger the unknown-type dispatch with a reduced width = 1
    buf = StringIO()
    iso8583.pp(doc_dec, spec, stream=buf, line_width=1)
    assert buf.getvalue().split("\n") == [
        "t Message Type : [1, 2, 3, 4]",
        "123 Reserved for Private Use : {1, 2, 3}",
        "",
    ]

    # Invalid encoded data: missing or unexpected dictionary keys
    doc_enc = {
        "t": {},
        "1": {'len': b'len'},
        "2": {'data': b'data'},
        "3": {'spam': b'eggs'},
    }
    buf = StringIO()
    iso8583.pp(doc_enc, spec, stream=buf, line_width=80)
    assert buf.getvalue().split("\n") == [
        "t Message Type : b''",
        "1 Bitmap, Secondary : b'len' b''",
        "2 Primary Account Number (PAN) : b'data'",
        "3 Processing Code : b''",
        "",
    ]
    # fmt: on
def test_pp_no_yield_on_empty_string(capsys):
    """Wrapping an empty value yields nothing, so nothing gets printed."""
    for chunk in _wrap_bytes_repr(b"", 10):
        print("spam")
    for chunk in _wrap_str_repr("", 10):
        print("eggs")
    assert capsys.readouterr().out == ""
| 41.29563
| 101
| 0.403947
| 3,733
| 32,128
| 3.416555
| 0.053844
| 0.215148
| 0.084679
| 0.048299
| 0.874314
| 0.865297
| 0.852125
| 0.844049
| 0.828524
| 0.82335
| 0
| 0.150596
| 0.428318
| 32,128
| 777
| 102
| 41.348777
| 0.543801
| 0.032339
| 0
| 0.814024
| 0
| 0.018293
| 0.463745
| 0.030744
| 0
| 0
| 0
| 0
| 0.655488
| 1
| 0.01372
| false
| 0
| 0.009146
| 0
| 0.022866
| 0.003049
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
1a6660ef66e470505b2f6489a4c6757325e9e737
| 17,442
|
py
|
Python
|
backend/apps/test_all_url_endpoints.py
|
n-hackert/match4healthcare
|
761248c27b49e568c545c643a72eac9a040649d7
|
[
"MIT"
] | null | null | null |
backend/apps/test_all_url_endpoints.py
|
n-hackert/match4healthcare
|
761248c27b49e568c545c643a72eac9a040649d7
|
[
"MIT"
] | null | null | null |
backend/apps/test_all_url_endpoints.py
|
n-hackert/match4healthcare
|
761248c27b49e568c545c643a72eac9a040649d7
|
[
"MIT"
] | null | null | null |
from django.test import TestCase, Client
from apps.iamstudent.models import Student, AUSBILDUNGS_TYPEN_COLUMNS
from apps.ineedstudent.models import Hospital
from apps.accounts.models import User
from django.contrib import auth
import numpy as np
def generate_random_student(countrycode="DE", plz="14482", i=0, validated_email=False):
    """Create a Student user with a random password and random training flags.

    Returns (email, password, student_uuid).
    """
    email = str(i) + "student@email.de"
    password = User.objects.make_random_password()
    # Random True/False for every training-type column.
    flags = np.random.choice([True, False], size=len(AUSBILDUNGS_TYPEN_COLUMNS))
    extra = dict(zip(AUSBILDUNGS_TYPEN_COLUMNS, flags))
    user = User.objects.create(
        username=email, email=email, is_student=True, validated_email=validated_email
    )
    user.set_password(password)
    student = Student.objects.create(
        user=user,
        countrycode=countrycode,
        plz=plz,
        availability_start='{}-{:02d}-{:02d}'.format(2020, 3, 23),
        **extra
    )
    user.save()
    student.save()
    return email, password, student.uuid
def generate_random_hospital(countrycode="DE", plz="14482", i=0, datenschutz_zugestimmt=True, validated_email=False):
    """Create a Hospital user with a random password.

    Returns (email, password, hospital_uuid).
    """
    email = str(i) + "hospital@email.de"
    password = User.objects.make_random_password()
    user = User.objects.create(
        username=email, email=email, is_hospital=True, validated_email=validated_email
    )
    user.set_password(password)
    hospital = Hospital.objects.create(
        user=user,
        countrycode=countrycode,
        plz=plz,
        ansprechpartner='XY',
        sonstige_infos='yeaah',
        datenschutz_zugestimmt=datenschutz_zugestimmt,
        einwilligung_datenweitergabe=True,
    )
    user.save()
    hospital.save()
    return email, password, hospital.uuid
def generate_staff_user(i=0):
    """Create a staff superuser with a random password; returns (email, password)."""
    email = str(i) + "staff@email.de"
    password = User.objects.make_random_password()
    user = User.objects.create_superuser(username=email, email=email)
    user.set_password(password)
    user.save()
    return email, password
class UrlEndpointTestCase(TestCase):
def setUp(self):
    # Every test drives the app through a fresh test client with a
    # browser-like User-Agent header.
    self.client = Client(HTTP_USER_AGENT='Mozilla/5.0')
def test_http_get_endpoints(self):
    """All public GET endpoints respond with HTTP 200 for anonymous visitors."""
    public_urls = (
        '/',
        '/about/',
        '/impressum/',
        '/dataprotection/',
        '/legal-questions/',
        # Mapview
        '/mapview/',
        # Accounts
        '/accounts/signup_student',
        '/accounts/signup_hospital',
        '/accounts/password_reset/',
        '/accounts/login/',
    )
    for url in public_urls:
        assert self.client.get(url, {}).status_code == 200
def test_count_url(self):
    """/accounts/count reports validated helper and facility totals as JSON."""
    scenarios = (
        # after one validated student: one user, no facilities
        (generate_random_student, {'facility_count': 0, 'user_count': 1}),
        # after adding one validated hospital: one facility as well
        (generate_random_hospital, {'facility_count': 1, 'user_count': 1}),
    )
    for make_account, expected in scenarios:
        make_account(validated_email=True)
        response = self.client.get('/accounts/count', {})
        assert response.status_code == 200
        self.assertJSONEqual(
            str(response.content, encoding='utf8'),
            expected
        )
def test_student(self):
    """Walk through the student account lifecycle end to end:
    password reset, e-mail validation, login, profile pages,
    access control against hospital-only and admin views, and deletion.
    The steps are order-dependent (login state carries between them).
    """
    student_email, student_password, _ = generate_random_student()
    assert self.client.post('/accounts/logout/', {}).status_code == 200
    response = self.client.post('/accounts/password_reset', {
        "email": student_email
    }, follow=True)
    #print(response.redirect_chain)
    assert response.status_code == 200
    #TODO why does this not redirect to /accounts/password_reset/done
    # Validation while logged out bounces to the login page first.
    response = self.client.post('/accounts/validate_email', {
        "email": student_email
    }, follow=True)
    assert "/accounts/login" in response.redirect_chain[0][0]
    assert response.status_code == 200
    response = self.client.post('/accounts/login/', {
        "username": student_email,
        "password": student_password,
    }, follow=True)
    assert auth.get_user(self.client).username == student_email
    # Fresh accounts start out with an unvalidated e-mail address.
    assert Student.objects.get(user__email=student_email).user.validated_email == False
    response = self.client.post('/accounts/validate_email', {
        "email": student_email
    }, follow=True)
    assert response.status_code == 200
    assert Student.objects.get(user__email=student_email).user.validated_email
    response = self.client.post('/accounts/password_change', {
        "email": student_email,
        "new_password1": student_password,
        "new_password2": student_password
    }, follow=True)
    #print(response.redirect_chain)
    assert response.status_code == 200
    #TODO why does this not redirect to /accounts/password_change/done
    assert self.client.get('/mapview/', {}).status_code == 200
    response = self.client.get('/accounts/profile_redirect', follow=True)
    assert "profile_student" in response.redirect_chain[0][0]
    assert self.client.get('/accounts/profile_student', {}).status_code == 200
    assert self.client.get('/accounts/logout/', {}).status_code == 200
    assert auth.get_user(self.client).is_anonymous
    response = self.client.post('/accounts/login/', {
        "username": student_email,
        "password": student_password,
    }, follow=True)
    assert auth.get_user(self.client).username == student_email
    # Test view list of students while logged in as student. Should redirect!
    response = self.client.get("/ineedstudent/students/DE/14482/0", follow=True)
    assert "login" in response.redirect_chain[0][0]
    assert response.status_code == 200
    # Test admin view when logged in as student. Should redirect
    response = self.client.get("/accounts/approve_hospitals", follow=True)
    assert "login" in response.redirect_chain[0][0]
    assert response.status_code == 200
    m1, p1, uuid1 = generate_random_hospital("DE", "14482", 1337)
    m2, p2, uuid2 = generate_random_hospital("DE", "10115", 1234)
    m3, p3, uuid3 = generate_random_hospital("AT", "4020", 420)
    # Hospital detail/list views are accessible while logged in.
    response = self.client.get('/ineedstudent/hospital_view/' + str(uuid1) + "/")
    assert response.status_code == 200
    response = self.client.get('/ineedstudent/hospitals/DE/14482')
    assert response.status_code == 200
    assert self.client.get('/accounts/delete_me_ask', {}).status_code == 200
    assert self.client.get('/accounts/delete_me', {}).status_code == 200
    # After deletion the old credentials no longer authenticate.
    response = self.client.post('/accounts/login/', {
        "username": student_email,
        "password": student_password,
    }, follow=True)
    assert auth.get_user(self.client).is_anonymous
    # Only available to logged in users, should redirect
    response = self.client.get('/ineedstudent/hospital_view/' + str(uuid1) + "/", follow=True)
    assert "login" in response.redirect_chain[0][0]
    assert response.status_code == 200
    # Only available to logged in users, should redirect
    response = self.client.get('/ineedstudent/hospitals/DE/14482', follow=True)
    assert "login" in response.redirect_chain[0][0]
    assert response.status_code == 200
def test_hospital(self):
    """Exercise the full hospital-account lifecycle through the HTTP layer:
    password reset/change, email validation, profile pages, student
    listings, account deletion, and the data-protection consent redirect."""
    hospital_email, hospital_password, uuid = generate_random_hospital()
    assert self.client.post('/accounts/logout/', {}).status_code == 200
    # Password reset can be requested while logged out.
    response = self.client.post('/accounts/password_reset', {
        "email": hospital_email
    }, follow=True)
    #print(response.redirect_chain)
    assert response.status_code == 200
    #TODO why does this not redirect to /accounts/password_reset/done
    # Validating the email while anonymous bounces to the login page.
    response = self.client.post('/accounts/validate_email', {
        "email": hospital_email
    }, follow=True)
    assert "/accounts/login" in response.redirect_chain[0][0]
    assert response.status_code == 200
    response = self.client.post('/accounts/login/', {
        "username": hospital_email,
        "password": hospital_password,
    }, follow=True)
    assert auth.get_user(self.client).username == hospital_email
    # A freshly generated hospital starts unvalidated; validating while
    # logged in flips the flag.
    assert Hospital.objects.get(user__email=hospital_email).user.validated_email == False
    response = self.client.post('/accounts/validate_email', {
        "email": hospital_email
    }, follow=True)
    assert response.status_code == 200
    assert Hospital.objects.get(user__email=hospital_email).user.validated_email
    response = self.client.post('/accounts/password_change', {
        "email": hospital_email,
        "new_password1": hospital_password,
        "new_password2": hospital_password
    }, follow=True)
    #print(response.redirect_chain)
    assert response.status_code == 200
    #TODO why does this not redirect to /accounts/password_change/done
    assert self.client.get('/mapview/', {}).status_code == 200
    #TODO Test detail view (Detailansicht) for a hospital!
    # The generic profile redirect must land on the hospital profile.
    response = self.client.get('/accounts/profile_redirect', follow=True)
    assert response.status_code == 200
    assert "profile_hospital" in response.redirect_chain[0][0]
    assert self.client.get('/accounts/profile_hospital', {}).status_code == 200
    assert self.client.get('/accounts/logout/', {}).status_code == 200
    assert auth.get_user(self.client).is_anonymous
    response = self.client.post('/accounts/login/', {
        "username": hospital_email,
        "password": hospital_password,
    }, follow=True)
    assert auth.get_user(self.client).username == hospital_email
    # Test view list of students while logged in as hospital. Should work!
    response = self.client.get("/ineedstudent/students/DE/14482/0", follow=True)
    assert response.status_code == 200
    assert len(response.redirect_chain) == 0
    # Test admin view when logged in as hospital. Should redirect.
    response = self.client.get("/accounts/approve_hospitals", follow=True)
    assert "login" in response.redirect_chain[0][0]
    assert response.status_code == 200
    response = self.client.get('/ineedstudent/hospital_view/' + str(uuid) + "/")
    assert response.status_code == 200
    response = self.client.get('/ineedstudent/hospitals/DE/14482')
    assert response.status_code == 200
    # Counts below suggest the listing honours search radius and email
    # validation (unvalidated m3 and Austrian m4 excluded) -- TODO confirm
    # against the view implementation.
    m1, p1, uuid1 = generate_random_student("DE", "14482", 1337, validated_email=True)
    m2, p2, uuid2 = generate_random_student("DE", "10115", 1234, validated_email=True)
    m3, p3, uuid3 = generate_random_student("DE", "10115", 12345, validated_email=False)
    m4, p4, uuid4 = generate_random_student("AT", "4020", 420, validated_email=True)
    response = self.client.get('/ineedstudent/students/DE/14482/0')
    assert "1 Helfer*innen" in str(response.content)
    assert response.status_code == 200
    response = self.client.get('/ineedstudent/students/DE/14482/50')
    assert "2 Helfer*innen" in str(response.content)
    assert response.status_code == 200
    # Delete the account; afterwards the old credentials must not work.
    assert self.client.get('/accounts/delete_me_ask', {}).status_code == 200
    assert self.client.get('/accounts/delete_me', {}).status_code == 200
    response = self.client.post('/accounts/login/', {
        "username": hospital_email,
        "password": hospital_password,
    }, follow=True)
    assert auth.get_user(self.client).is_anonymous
    # Test view list of students without being logged in. Should redirect!
    response = self.client.get("/ineedstudent/students/DE/14482/0", follow=True)
    assert "login" in response.redirect_chain[0][0]
    assert response.status_code == 200
    # Test admin view as logged-out user. Should redirect.
    response = self.client.get("/accounts/approve_hospitals", follow=True)
    assert "login" in response.redirect_chain[0][0]
    assert response.status_code == 200
    # A hospital that has not accepted the data-protection terms is sent
    # to the consent ("zustimmung") page right after login; posting the
    # consent form persists the flag and redirects onwards.
    hospital_email, hospital_password, uuid = generate_random_hospital(datenschutz_zugestimmt=False, i=9999)
    response = self.client.post('/accounts/login/', {
        "username": hospital_email,
        "password": hospital_password,
    }, follow=True)
    assert Hospital.objects.get(user__email=hospital_email).datenschutz_zugestimmt == False
    assert "zustimmung" in response.redirect_chain[1][0]
    assert auth.get_user(self.client).username == hospital_email
    response = self.client.post('/ineedstudent/zustimmung', {
        "datenschutz_zugestimmt": True,
        "einwilligung_datenweitergabe": True,
    }, follow=True)
    assert response.status_code == 200
    assert "login_redirect" in response.redirect_chain[0][0]
    assert Hospital.objects.get(user__email=hospital_email).datenschutz_zugestimmt == True
def test_sudent_individual_view(self):
    """Check access to the per-student detail page for all three roles:
    a student is redirected to their own profile, while staff and
    hospital users can open the page directly.

    NOTE(review): "sudent" in the method name is a typo; kept as-is so
    test selection by name stays stable.
    """
    staff_email, staff_password = generate_staff_user()
    hospital_email, hospital_password, hospital_uuid = generate_random_hospital()
    student_email, student_password, student_uuid = generate_random_student()
    # As the student themselves: viewing the detail page redirects to the
    # editable profile instead.
    response = self.client.post('/accounts/login/', {
        "username": student_email,
        "password": student_password,
    }, follow=True)
    response = self.client.get('/iamstudent/view_student/' + str(student_uuid), follow=True)
    assert response.status_code == 200
    assert "/accounts/profile_student" in response.redirect_chain[0][0]
    # TODO: test which emails can be seen here!
    # As staff: direct access is allowed.
    response = self.client.post('/accounts/login/', {
        "username": staff_email,
        "password": staff_password,
    }, follow=True)
    response = self.client.get('/iamstudent/view_student/' + str(student_uuid))
    assert response.status_code == 200
    # TODO: test which emails can be seen here!
    # As hospital: direct access is allowed.
    response = self.client.post('/accounts/login/', {
        "username": hospital_email,
        "password": hospital_password,
    }, follow=True)
    response = self.client.get('/iamstudent/view_student/' + str(student_uuid))
    assert response.status_code == 200
def test_admin(self):
staff_email, staff_password = generate_staff_user()
assert self.client.post('/accounts/logout/', {}).status_code == 200
response = self.client.post('/accounts/password_reset', {
"email": staff_email
}, follow=True)
#print(response.redirect_chain)
assert response.status_code == 200
#TODO why does this not redirect to /accounts/password_reset/done
response = self.client.post('/accounts/login/', {
"username": staff_email,
"password": staff_password,
}, follow=True)
assert auth.get_user(self.client).username == staff_email
response = self.client.post('/accounts/password_change', {
"email": staff_email,
"new_password1": staff_password,
"new_password2": staff_password
}, follow=True)
#print(response.redirect_chain)
assert response.status_code == 200
#TODO why does this not redirect to /accounts/password_change/done
assert self.client.get('/mapview/', {}).status_code == 200
#TODO Test Detailansicht for a hospital!
response = self.client.get('/accounts/profile_redirect', follow=True)
assert response.status_code == 200
assert "approve_hospitals" in response.redirect_chain[0][0]
response = self.client.get('/accounts/approve_hospitals', follow=True)
assert response.status_code == 200
assert self.client.get('/accounts/logout/', {}).status_code == 200
assert auth.get_user(self.client).is_anonymous
response = self.client.post('/accounts/login/', {
"username": staff_email,
"password": staff_password,
}, follow=True)
assert auth.get_user(self.client).username == staff_email
# Test view list of studens witbeing logged in as staff user
# Current behavior: Should redirect!
# TODO: discuss what the behavior of this should be!
response = self.client.get("/ineedstudent/students/DE/14482/0", follow=True)
assert "login" in response.redirect_chain[0][0]
assert response.status_code == 200
assert self.client.get('/accounts/delete_me_ask', {}).status_code == 200
assert self.client.get('/accounts/delete_me', {}).status_code == 200
response = self.client.post('/accounts/login/', {
"username": staff_email,
"password": staff_password,
}, follow=True)
assert auth.get_user(self.client).is_anonymous
response = self.client.get("/ineedstudent/students/DE/14482/0", follow=True)
| 42.75
| 117
| 0.642071
| 2,011
| 17,442
| 5.397812
| 0.095475
| 0.082911
| 0.073054
| 0.075173
| 0.826347
| 0.805804
| 0.788945
| 0.769047
| 0.71193
| 0.666145
| 0
| 0.030169
| 0.237931
| 17,442
| 407
| 118
| 42.855037
| 0.786488
| 0.079349
| 0
| 0.65529
| 0
| 0
| 0.150799
| 0.075431
| 0
| 0
| 0
| 0.002457
| 0.34471
| 1
| 0.03413
| false
| 0.133106
| 0.020478
| 0
| 0.068259
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
1aa364668600c76b77b41f0aef67d47eba6ab5b1
| 342
|
py
|
Python
|
src/genie/libs/parser/iosxe/tests/ShowEnvironment/cli/equal/golden_output2_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 204
|
2018-06-27T00:55:27.000Z
|
2022-03-06T21:12:18.000Z
|
src/genie/libs/parser/iosxe/tests/ShowEnvironment/cli/equal/golden_output2_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 468
|
2018-06-19T00:33:18.000Z
|
2022-03-31T23:23:35.000Z
|
src/genie/libs/parser/iosxe/tests/ShowEnvironment/cli/equal/golden_output2_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 309
|
2019-01-16T20:21:07.000Z
|
2022-03-30T12:56:41.000Z
|
# Parsed "show environment" fixture: two power slots (P6, P7), each
# exposing the same fan-controller PWM temperature sensor reading.
_PWM_SENSOR = {
    "Temp: FC PWM1": {"state": "Fan Speed 45%", "reading": "25 Celsius"}
}
expected_output = {
    "slot": {slot: {"sensor": dict(_PWM_SENSOR)} for slot in ("P6", "P7")}
}
| 22.8
| 84
| 0.342105
| 27
| 342
| 4.296296
| 0.592593
| 0.172414
| 0.206897
| 0.275862
| 0.810345
| 0.810345
| 0.810345
| 0.810345
| 0.810345
| 0.810345
| 0
| 0.065574
| 0.464912
| 342
| 14
| 85
| 24.428571
| 0.568306
| 0
| 0
| 0.285714
| 0
| 0
| 0.339181
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
46fbf6e8f4ba1df06e7ffd5b489947bd2fe55c81
| 8,130
|
py
|
Python
|
old.py
|
IGARDS/marchmadness_study
|
16bd3c4b4336fec6b5a2cbb36cd5aed79cccfd20
|
[
"MIT"
] | null | null | null |
old.py
|
IGARDS/marchmadness_study
|
16bd3c4b4336fec6b5a2cbb36cd5aed79cccfd20
|
[
"MIT"
] | null | null | null |
old.py
|
IGARDS/marchmadness_study
|
16bd3c4b4336fec6b5a2cbb36cd5aed79cccfd20
|
[
"MIT"
] | null | null | null |
def support_map_vectorized1(linked, num_indirect_equal_direct=3):
    """Compute pairwise "support" scores from linked game records.

    Each row of ``linked`` connects team i and team k through a common
    opponent j.  "Direct" rows (i == k) record a head-to-head game of that
    team against j; indirect rows compare both teams' results against j.

    Parameters
    ----------
    linked : pandas.DataFrame
        Must contain columns 'team_i_name', 'team_j_name', 'team_k_name',
        'team_i_score', 'team_j_i_score', 'team_k_score',
        'team_j_k_score'.  NOTE: mutated in place (helper columns
        'direct', 'support_ik', 'index_ik', 'support_ki', 'index_ki' are
        added).
    num_indirect_equal_direct : int, default 3
        Weight of one direct win relative to one indirect comparison.

    Returns
    -------
    pandas.Series
        Indexed by (team1, team2); summed support for team1 over team2.
    """
    # A row is "direct" when the i-side and k-side name the same team,
    # i.e. the row records an actual game between that team and j.
    linked["direct"] = linked["team_i_name"] == linked["team_k_name"]

    # Build the (team1, team2) index pairs: (i, j)/(j, i) for direct rows,
    # (i, k)/(k, i) for indirect rows.
    # BUGFIX: assigning a DataFrame through .loc aligns on column labels,
    # so the original code left the second column NaN for direct rows
    # (RHS column 'team_j_name' never matched LHS 'team_k_name').
    # .to_numpy() forces positional assignment.
    for_index1 = linked[["team_i_name", "team_k_name"]].copy()
    for_index1.loc[linked["direct"]] = linked.loc[
        linked["direct"], ["team_i_name", "team_j_name"]].to_numpy()
    for_index1.columns = ["team1", "team2"]
    for_index2 = linked[["team_k_name", "team_i_name"]].copy()
    for_index2.loc[linked["direct"]] = linked.loc[
        linked["direct"], ["team_j_name", "team_i_name"]].to_numpy()
    for_index2.columns = ["team1", "team2"]
    index_ik = pd.MultiIndex.from_frame(for_index1, sortorder=0)
    index_ki = pd.MultiIndex.from_frame(for_index2, sortorder=0)

    #######################################
    # part to modify
    # direct: a win by more than direct_thres points counts as
    # num_indirect_equal_direct units of support.
    d_ik = linked['team_i_score'] - linked['team_j_i_score']
    direct_thres = 1
    support_ik = num_indirect_equal_direct * (
        linked["direct"] & (d_ik > direct_thres)).astype(int)
    support_ki = num_indirect_equal_direct * (
        linked["direct"] & (d_ik < -direct_thres)).astype(int)

    # indirect: compare both teams' margins against the common opponent j.
    d_ij = linked["team_i_score"] - linked["team_j_i_score"]
    d_kj = linked["team_k_score"] - linked["team_j_k_score"]
    # always positive; if i beat j by 5 points and k beat j by 2 points
    # then this spread is 3
    spread = np.abs(d_ij - d_kj)
    support_ik += ((~linked["direct"]) & (d_ij > 0) & (d_kj > 0) & (d_ij > d_kj) & (spread > 10)).astype(int)
    support_ik += ((~linked["direct"]) & (d_ij < 0) & (d_kj < 0) & (d_ij > d_kj) & (spread > 15)).astype(int)
    support_ik += ((~linked["direct"]) & (d_ij > 0) & (d_kj < 0) & (spread > 2)).astype(int)
    support_ki += ((~linked["direct"]) & (d_kj > 0) & (d_ij > 0) & (d_kj > d_ij) & (spread > 10)).astype(int)
    support_ki += ((~linked["direct"]) & (d_kj < 0) & (d_ij < 0) & (d_kj > d_ij) & (spread > 15)).astype(int)
    support_ki += ((~linked["direct"]) & (d_kj > 0) & (d_ij < 0) & (spread > 2)).astype(int)
    # end part to modify
    #######################################

    linked["support_ik"] = support_ik
    linked["index_ik"] = index_ik
    linked["support_ki"] = support_ki
    linked["index_ki"] = index_ki
    # Stack both directions and aggregate per (team1, team2) pair.
    # BUGFIX: Series.append was removed in pandas 2.0; pd.concat is the
    # supported equivalent and behaves identically here.
    ret1 = linked.set_index(index_ik)["support_ik"]
    ret2 = linked.set_index(index_ki)["support_ki"]
    ret = pd.concat([ret1, ret2])
    ret = ret.groupby(level=[0, 1]).sum()
    return ret
def support_map_vectorized_direct(linked, direct_thres=1):
    """Compute pairwise support scores from direct (head-to-head) games only.

    Indirect rows (team i != team k) are dropped; a direct win by more
    than ``direct_thres`` points contributes one unit of support.

    Parameters
    ----------
    linked : pandas.DataFrame
        Must contain columns 'team_i_name', 'team_j_name', 'team_k_name',
        'team_i_score', 'team_j_i_score'.  NOTE: a 'direct' column is
        added to the caller's frame before the internal copy is taken.
    direct_thres : int, default 1
        Minimum winning margin for a game to count.

    Returns
    -------
    pandas.Series
        Indexed by (team1, team2); summed support for team1 over team2.
    """
    # Mark head-to-head rows, then work on a direct-only copy.
    linked["direct"] = linked["team_i_name"] == linked["team_k_name"]
    linked = linked.loc[linked["direct"]].copy()

    # For direct rows the pair is (i, j) / (j, i).
    # BUGFIX: assigning a DataFrame through .loc aligns on column labels,
    # which left the second column NaN (RHS 'team_j_name' never matched
    # LHS 'team_k_name'); .to_numpy() forces positional assignment.
    for_index1 = linked[["team_i_name", "team_k_name"]].copy()
    for_index1.loc[linked["direct"]] = linked.loc[
        linked["direct"], ["team_i_name", "team_j_name"]].to_numpy()
    for_index1.columns = ["team1", "team2"]
    for_index2 = linked[["team_k_name", "team_i_name"]].copy()
    for_index2.loc[linked["direct"]] = linked.loc[
        linked["direct"], ["team_j_name", "team_i_name"]].to_numpy()
    for_index2.columns = ["team1", "team2"]
    index_ik = pd.MultiIndex.from_frame(for_index1, sortorder=0)
    index_ki = pd.MultiIndex.from_frame(for_index2, sortorder=0)

    #######################################
    # part to modify
    # direct: one unit of support per win by more than direct_thres.
    d_ik = linked['team_i_score'] - linked['team_j_i_score']
    support_ik = (linked["direct"] & (d_ik > direct_thres)).astype(int)
    support_ki = (linked["direct"] & (d_ik < -direct_thres)).astype(int)
    # end part to modify
    #######################################

    linked["support_ik"] = support_ik
    linked["index_ik"] = index_ik
    linked["support_ki"] = support_ki
    linked["index_ki"] = index_ki
    # Stack both directions and aggregate per (team1, team2) pair.
    # BUGFIX: Series.append was removed in pandas 2.0; use pd.concat.
    ret1 = linked.set_index(index_ik)["support_ik"]
    ret2 = linked.set_index(index_ki)["support_ki"]
    ret = pd.concat([ret1, ret2])
    ret = ret.groupby(level=[0, 1]).sum()
    return ret
def support_map_vectorized_direct_indirect_weighted(linked, direct_thres=1, spread_thres=0, weight_indirect=0.5, verbose=False):
    """Compute pairwise support with weighted indirect comparisons.

    A direct win by more than ``direct_thres`` points counts 1 unit; an
    indirect win/loss split against the common opponent with spread above
    ``spread_thres`` counts ``weight_indirect`` units.

    Parameters
    ----------
    linked : pandas.DataFrame
        Must contain columns 'team_i_name', 'team_j_name', 'team_k_name',
        'team_i_score', 'team_j_i_score', 'team_k_score',
        'team_j_k_score'.  NOTE: mutated in place (helper columns added).
    direct_thres : int, default 1
        Minimum winning margin for a direct game to count.
    spread_thres : int, default 0
        Minimum margin spread for an indirect comparison to count.
    weight_indirect : float, default 0.5
        Weight of one indirect comparison relative to a direct win.
    verbose : bool, default False
        Print counts of contributing direct/indirect rows.

    Returns
    -------
    pandas.Series
        Indexed by (team1, team2); summed support for team1 over team2.
    """
    # A row is "direct" when the i-side and k-side name the same team.
    linked["direct"] = linked["team_i_name"] == linked["team_k_name"]

    # Build the (team1, team2) index pairs: (i, j)/(j, i) for direct rows,
    # (i, k)/(k, i) for indirect rows.
    # BUGFIX: assigning a DataFrame through .loc aligns on column labels,
    # which left the second column NaN for direct rows; .to_numpy()
    # forces positional assignment.
    for_index1 = linked[["team_i_name", "team_k_name"]].copy()
    for_index1.loc[linked["direct"]] = linked.loc[
        linked["direct"], ["team_i_name", "team_j_name"]].to_numpy()
    for_index1.columns = ["team1", "team2"]
    for_index2 = linked[["team_k_name", "team_i_name"]].copy()
    for_index2.loc[linked["direct"]] = linked.loc[
        linked["direct"], ["team_j_name", "team_i_name"]].to_numpy()
    for_index2.columns = ["team1", "team2"]
    index_ik = pd.MultiIndex.from_frame(for_index1, sortorder=0)
    index_ki = pd.MultiIndex.from_frame(for_index2, sortorder=0)

    #######################################
    # part to modify
    # direct: one unit per win by more than direct_thres.
    d_ik = linked['team_i_score'] - linked['team_j_i_score']
    support_ik = (linked["direct"] & (d_ik > direct_thres)).astype(int)
    support_ki = (linked["direct"] & (d_ik < -direct_thres)).astype(int)

    # indirect: compare both teams' margins against the common opponent j.
    d_ij = linked["team_i_score"] - linked["team_j_i_score"]
    d_kj = linked["team_k_score"] - linked["team_j_k_score"]
    # always positive; if i beat j by 5 points and k beat j by 2 points
    # then this spread is 3
    spread = np.abs(d_ij - d_kj)
    support_ik += weight_indirect * ((~linked["direct"]) & (d_ij > 0) & (d_kj < 0) & (spread > spread_thres)).astype(int)
    support_ki += weight_indirect * ((~linked["direct"]) & (d_kj > 0) & (d_ij < 0) & (spread > spread_thres)).astype(int)
    # end part to modify
    #######################################

    linked["support_ik"] = support_ik
    linked["index_ik"] = index_ik
    linked["support_ki"] = support_ki
    linked["index_ki"] = index_ki
    if verbose:
        print('Direct')
        print("Total:", sum(linked["direct"] & (linked["support_ik"] > 0)) + sum(linked["direct"] & (linked["support_ki"] > 0)),
              "ik:", sum(linked["direct"] & (linked["support_ik"] > 0)),
              "ki:", sum(linked["direct"] & (linked["support_ki"] > 0)))
        print('Indirect')
        print("Total:", sum((~linked["direct"]) & (linked["support_ik"] > 0)) + sum(~linked["direct"] & (linked["support_ki"] > 0)),
              "ik:", sum((~linked["direct"]) & (linked["support_ik"] > 0)),
              "ki:", sum(~linked["direct"] & (linked["support_ki"] > 0)))
    # Stack both directions and aggregate per (team1, team2) pair.
    # BUGFIX: Series.append was removed in pandas 2.0; use pd.concat.
    ret1 = linked.set_index(index_ik)["support_ik"]
    ret2 = linked.set_index(index_ki)["support_ki"]
    ret = pd.concat([ret1, ret2])
    ret = ret.groupby(level=[0, 1]).sum()
    return ret
| 49.573171
| 127
| 0.625338
| 1,212
| 8,130
| 3.832508
| 0.075908
| 0.111087
| 0.040689
| 0.025188
| 0.947686
| 0.937567
| 0.927449
| 0.927449
| 0.927449
| 0.913671
| 0
| 0.01662
| 0.163715
| 8,130
| 163
| 128
| 49.877301
| 0.666569
| 0.243419
| 0
| 0.736264
| 0
| 0
| 0.18482
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032967
| false
| 0
| 0
| 0
| 0.065934
| 0.043956
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
20087d71412165383a8f2df0550314dcf4752078
| 227
|
py
|
Python
|
src/tensorflow_wavelets/Layers/__init__.py
|
simonsimon006/tensorflow-wavelets
|
21a095bf0048ae2488ca5ae4961d2cbfe94263a9
|
[
"MIT"
] | null | null | null |
src/tensorflow_wavelets/Layers/__init__.py
|
simonsimon006/tensorflow-wavelets
|
21a095bf0048ae2488ca5ae4961d2cbfe94263a9
|
[
"MIT"
] | null | null | null |
src/tensorflow_wavelets/Layers/__init__.py
|
simonsimon006/tensorflow-wavelets
|
21a095bf0048ae2488ca5ae4961d2cbfe94263a9
|
[
"MIT"
] | null | null | null |
from tensorflow_wavelets.Layers.DMWT import DMWT, IDMWT
from tensorflow_wavelets.Layers.DWT import DWT, IDWT
from tensorflow_wavelets.Layers.DTCWT import DTCWT, IDTCWT
from tensorflow_wavelets.Layers.Threshold import Threshold
| 45.4
| 58
| 0.867841
| 31
| 227
| 6.225806
| 0.387097
| 0.290155
| 0.455959
| 0.580311
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0837
| 227
| 4
| 59
| 56.75
| 0.927885
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6488b9f9b9bdaaa2eaf613b6233a850281506ab7
| 5,325
|
py
|
Python
|
tests/test_eliminate_whitespaces.py
|
PatrikHlobil/Eliminate-Newlines-After-Function-Definition
|
fa98603d37cb40b98343a1e552b8294a1d4e3e29
|
[
"MIT"
] | 1
|
2020-08-31T10:44:29.000Z
|
2020-08-31T10:44:29.000Z
|
tests/test_eliminate_whitespaces.py
|
PatrikHlobil/Eliminate-Whitespaces-After-Function-Definition
|
fa98603d37cb40b98343a1e552b8294a1d4e3e29
|
[
"MIT"
] | null | null | null |
tests/test_eliminate_whitespaces.py
|
PatrikHlobil/Eliminate-Whitespaces-After-Function-Definition
|
fa98603d37cb40b98343a1e552b8294a1d4e3e29
|
[
"MIT"
] | null | null | null |
from _pytest.config import directory_arg
import pytest
from pytest_check import check
from eliminate_newlines.core import (
eliminate_newlines_after_function_definition_in_file,
eliminate_newlines_after_function_definition_in_file_or_directory,
eliminate_newlines_after_function_definition_in_string,
)
# Test fixtures: each dict pairs a raw "original" snippet with the
# "formatted" snippet expected after newlines following a function
# definition are eliminated.
# NOTE(review): the whitespace inside these triple-quoted strings IS the
# test data, and it appears to have been mangled (original/formatted
# variants look identical here) -- verify against upstream before
# trusting these fixtures.
testcode0 = {
    "original": """
def foo(a):
return a + 1
a=1+1
""",
    "formatted": """
def foo(a):
return a + 1
a=1+1
""",
}
testcode1 = {
    "original": """
def foo(a):
return a + 1
a=1+1
""",
    "formatted": """
def foo(a):
return a + 1
a=1+1
""",
}
testcode2 = {
    "original": """
def test(a):
return a + 1
class A:
def test_this(a,
b,
c):
return a + b + c
""",
    "formatted": """
def test(a):
return a + 1
class A:
def test_this(a,
b,
c):
return a + b + c
""",
}
@pytest.mark.parametrize("code", [testcode1, testcode2])
def test_eliminate_newlines_after_function_definition_in_string(code):
    """The string-level API turns each 'original' snippet into 'formatted'."""
    result = eliminate_newlines_after_function_definition_in_string(code=code["original"])
    assert result == code["formatted"]
class TestEliminateNewlinesAfterFunctionDefinitionInFile:
    """Tests for the single-file entry point, in write mode and check mode."""

    @staticmethod
    @pytest.mark.parametrize(
        "code,expected_return_value", [[testcode0, 0], [testcode1, 1], [testcode2, 1]]
    )
    def test_eliminate_newlines_after_function_definition_in_file(
        code, expected_return_value, tmp_path
    ):
        # Write the snippet to a temp file, format it in place, then
        # verify both the resulting file content and the return value
        # (soft-asserted via pytest-check so both failures are reported).
        filepath = tmp_path / "testfile.py"
        filepath.write_text(code["original"])
        return_value = eliminate_newlines_after_function_definition_in_file(
            path=filepath
        )
        with check:
            assert (
                filepath.read_text() == code["formatted"]
            ), "File not correctly formatted."
        with check:
            assert return_value == expected_return_value, "Return value is not correct"

    @staticmethod
    @pytest.mark.parametrize(
        "code,expected_return_value", [[testcode0, 0], [testcode1, 1], [testcode2, 1]]
    )
    def test_eliminate_newlines_after_function_definition_in_file__checkmode(
        code, expected_return_value, tmp_path
    ):
        # check=True must report the would-be change via the return value
        # without modifying the file on disk.
        filepath = tmp_path / "testfile.py"
        filepath.write_text(code["original"])
        return_value = eliminate_newlines_after_function_definition_in_file(
            path=filepath, check=True
        )
        with check:
            assert (
                filepath.read_text() == code["original"]
            ), "File should not be touched."
        with check:
            assert return_value == expected_return_value, "Return value is not correct"
class TestEliminateNewlinesAfterFunctionDefinitionInFileOrDirectory:
    """Tests for the path-dispatching entry point (single file or directory)."""

    @staticmethod
    @pytest.mark.parametrize(
        "code,expected_return_value", [[testcode0, 0], [testcode1, 1], [testcode2, 1]]
    )
    def test_eliminate_newlines_after_function_definition_in_file_or_directory__file(
        code,
        expected_return_value,
        tmp_path,
    ):
        # Single-file dispatch: behaves like the file-level API.
        filepath = tmp_path / "testfile.py"
        filepath.write_text(code["original"])
        return_value = (
            eliminate_newlines_after_function_definition_in_file_or_directory(
                path=filepath
            )
        )
        with check:
            assert (
                filepath.read_text() == code["formatted"]
            ), "File not correctly formatted."
        with check:
            assert return_value == expected_return_value, "Return value is not correct"

    @staticmethod
    def test_eliminate_newlines_after_function_definition_in_file_or_directory__directory(
        tmp_path,
    ):
        # Directory dispatch: every .py file inside must be formatted.
        directory = tmp_path
        config = (
            [tmp_path / "testcode0.py", testcode0],
            [tmp_path / "testcode1.py", testcode1],
            [tmp_path / "testcode2.py", testcode2],
        )
        for filepath, content in config:
            filepath.write_text(content["original"])
        return_value = (
            eliminate_newlines_after_function_definition_in_file_or_directory(
                path=directory
            )
        )
        for filepath, content in config:
            with check:
                # BUGFIX: the comparison was missing `assert`, so this
                # check silently never ran.
                assert (
                    filepath.read_text() == content["formatted"]
                ), "File not correctly formatted."
        with check:
            assert return_value == 1, "Return value is not correct"

    @staticmethod
    def test_eliminate_newlines_after_function_definition_in_file_or_directory__checkmode(
        tmp_path,
    ):
        # Directory dispatch without check=True... NOTE(review): unlike the
        # file-level twin, this test does not pass check=True, yet asserts
        # files keep their original content -- confirm intended call.
        directory = tmp_path
        config = (
            [tmp_path / "testcode0.py", testcode0],
            [tmp_path / "testcode1.py", testcode1],
            [tmp_path / "testcode2.py", testcode2],
        )
        for filepath, content in config:
            filepath.write_text(content["original"])
        return_value = (
            eliminate_newlines_after_function_definition_in_file_or_directory(
                path=directory
            )
        )
        for filepath, content in config:
            with check:
                # BUGFIX: the comparison was missing `assert`, so this
                # check silently never ran.
                assert (
                    filepath.read_text() == content["original"]
                ), "File should not be touched."
        with check:
            assert return_value == 1, "Return value is not correct"
| 25.724638
| 90
| 0.611455
| 557
| 5,325
| 5.526032
| 0.111311
| 0.08577
| 0.107212
| 0.146199
| 0.864847
| 0.864847
| 0.864847
| 0.822937
| 0.786875
| 0.768356
| 0
| 0.013593
| 0.295399
| 5,325
| 206
| 91
| 25.849515
| 0.80677
| 0
| 0
| 0.721212
| 0
| 0
| 0.187418
| 0.014648
| 0
| 0
| 0
| 0
| 0.054545
| 1
| 0.036364
| false
| 0
| 0.024242
| 0
| 0.121212
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b3e620733673d77d9cb575dc29fbef656f203aab
| 98,269
|
py
|
Python
|
release/src-rt-6.x.4708/router/samba3/source4/dsdb/tests/python/sec_descriptor.py
|
zaion520/ATtomato
|
4d48bb79f8d147f89a568cf18da9e0edc41f93fb
|
[
"FSFAP"
] | 2
|
2019-01-13T09:19:10.000Z
|
2019-02-15T01:21:02.000Z
|
release/src-rt-6.x.4708/router/samba3/source4/dsdb/tests/python/sec_descriptor.py
|
zaion520/ATtomato
|
4d48bb79f8d147f89a568cf18da9e0edc41f93fb
|
[
"FSFAP"
] | null | null | null |
release/src-rt-6.x.4708/router/samba3/source4/dsdb/tests/python/sec_descriptor.py
|
zaion520/ATtomato
|
4d48bb79f8d147f89a568cf18da9e0edc41f93fb
|
[
"FSFAP"
] | 2
|
2020-03-08T01:58:25.000Z
|
2020-12-20T10:34:54.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import optparse
import sys
import os
import base64
import re
import random
sys.path.insert(0, "bin/python")
import samba
samba.ensure_external_module("testtools", "testtools")
samba.ensure_external_module("subunit", "subunit/python")
import samba.getopt as options
# Some error messages that are being tested
from ldb import SCOPE_SUBTREE, SCOPE_BASE, LdbError, ERR_NO_SUCH_OBJECT
# For running the test unit
from samba.ndr import ndr_pack
from samba.dcerpc import security
from samba import gensec, sd_utils
from samba.samdb import SamDB
from samba.credentials import Credentials
from samba.auth import system_session
from samba.dsdb import DS_DOMAIN_FUNCTION_2008
from samba.dcerpc.security import (
SECINFO_OWNER, SECINFO_GROUP, SECINFO_DACL, SECINFO_SACL)
from subunit.run import SubunitTestRunner
import samba.tests
from samba.tests import delete_force
import unittest
# Command-line handling: sec_descriptor.py [options] <host>
parser = optparse.OptionParser("sec_descriptor.py [options] <host>")
sambaopts = options.SambaOptions(parser)
parser.add_option_group(sambaopts)
parser.add_option_group(options.VersionOptions(parser))
# use command line creds if available
credopts = options.CredentialsOptions(parser)
parser.add_option_group(credopts)
opts, args = parser.parse_args()

# The target host is the single required positional argument.
if len(args) < 1:
    parser.print_usage()
    sys.exit(1)

host = args[0]
lp = sambaopts.get_loadparm()
creds = credopts.get_credentials(lp)
# Require sealed (encrypted) GENSEC connections for all test traffic.
creds.set_gensec_features(creds.get_gensec_features() | gensec.FEATURE_SEAL)
#
# Tests start here
#
class DescriptorTests(samba.tests.TestCase):
    """Common helpers for the AD security-descriptor tests.

    NOTE(review): relies on the module-level globals ``ldb``, ``creds``,
    ``lp`` and ``host`` that the script's runner section sets up.
    """

    def get_users_domain_dn(self, name):
        # DN of a user object inside the default CN=Users container.
        return "CN=%s,CN=Users,%s" % (name, self.base_dn)

    def get_unique_schema_class_name(self):
        # Draw random candidate names until one is confirmed absent from
        # the schema partition (the search raises ERR_NO_SUCH_OBJECT).
        while True:
            class_name = "test-class%s" % random.randint(1,100000)
            class_dn = "CN=%s,%s" % (class_name, self.schema_dn)
            try:
                self.ldb_admin.search(base=class_dn, attrs=["*"])
            except LdbError, (num, _):
                self.assertEquals(num, ERR_NO_SUCH_OBJECT)
                return class_name

    def create_schema_class(self, _ldb, object_dn, desc=None):
        # Add a minimal classSchema object at object_dn.  ``desc`` may be
        # an SDDL string or a security.descriptor; either way it becomes
        # the nTSecurityDescriptor attribute of the new object.
        ldif = """
dn: """ + object_dn + """
objectClass: classSchema
objectCategory: CN=Class-Schema,""" + self.schema_dn + """
defaultObjectCategory: """ + object_dn + """
distinguishedName: """ + object_dn + """
governsID: 1.2.840.""" + str(random.randint(1,100000)) + """.1.5.9939
instanceType: 4
objectClassCategory: 1
subClassOf: organizationalPerson
systemFlags: 16
rDNAttID: cn
systemMustContain: cn
systemOnly: FALSE
"""
        if desc:
            assert(isinstance(desc, str) or isinstance(desc, security.descriptor))
            if isinstance(desc, str):
                # SDDL text form.
                ldif += "nTSecurityDescriptor: %s" % desc
            elif isinstance(desc, security.descriptor):
                # NDR-packed binary form, base64-encoded ("::" LDIF syntax).
                ldif += "nTSecurityDescriptor:: %s" % base64.b64encode(ndr_pack(desc))
        _ldb.add_ldif(ldif)

    def create_configuration_container(self, _ldb, object_dn, desc=None):
        # Add a plain container object, optionally with an explicit
        # security descriptor (same string/descriptor convention as above).
        ldif = """
dn: """ + object_dn + """
objectClass: container
objectCategory: CN=Container,""" + self.schema_dn + """
showInAdvancedViewOnly: TRUE
instanceType: 4
"""
        if desc:
            assert(isinstance(desc, str) or isinstance(desc, security.descriptor))
            if isinstance(desc, str):
                ldif += "nTSecurityDescriptor: %s" % desc
            elif isinstance(desc, security.descriptor):
                ldif += "nTSecurityDescriptor:: %s" % base64.b64encode(ndr_pack(desc))
        _ldb.add_ldif(ldif)

    def create_configuration_specifier(self, _ldb, object_dn, desc=None):
        # Add a displaySpecifier object, optionally with an explicit
        # security descriptor (same string/descriptor convention as above).
        ldif = """
dn: """ + object_dn + """
objectClass: displaySpecifier
showInAdvancedViewOnly: TRUE
"""
        if desc:
            assert(isinstance(desc, str) or isinstance(desc, security.descriptor))
            if isinstance(desc, str):
                ldif += "nTSecurityDescriptor: %s" % desc
            elif isinstance(desc, security.descriptor):
                ldif += "nTSecurityDescriptor:: %s" % base64.b64encode(ndr_pack(desc))
        _ldb.add_ldif(ldif)

    def get_ldb_connection(self, target_username, target_password):
        # Open a new SamDB connection to ``host`` authenticated as the
        # given user, inheriting domain/realm/workstation from the
        # command-line credentials and forcing sealed connections.
        creds_tmp = Credentials()
        creds_tmp.set_username(target_username)
        creds_tmp.set_password(target_password)
        creds_tmp.set_domain(creds.get_domain())
        creds_tmp.set_realm(creds.get_realm())
        creds_tmp.set_workstation(creds.get_workstation())
        creds_tmp.set_gensec_features(creds_tmp.get_gensec_features()
                                      | gensec.FEATURE_SEAL)
        ldb_target = SamDB(url=host, credentials=creds_tmp, lp=lp)
        return ldb_target

    def setUp(self):
        super(DescriptorTests, self).setUp()
        # Cache the commonly used DNs and the domain SID for subclasses.
        self.ldb_admin = ldb
        self.base_dn = ldb.domain_dn()
        self.configuration_dn = self.ldb_admin.get_config_basedn().get_linearized()
        self.schema_dn = self.ldb_admin.get_schema_basedn().get_linearized()
        self.domain_sid = security.dom_sid(self.ldb_admin.get_domain_sid())
        self.sd_utils = sd_utils.SDUtils(ldb)
        print "baseDN: %s" % self.base_dn
################################################################################################
## Tests for DOMAIN
# Default descriptor tests #####################################################################
class OwnerGroupDescriptorTests(DescriptorTests):
def deleteAll(self):
delete_force(self.ldb_admin, self.get_users_domain_dn("testuser1"))
delete_force(self.ldb_admin, self.get_users_domain_dn("testuser2"))
delete_force(self.ldb_admin, self.get_users_domain_dn("testuser3"))
delete_force(self.ldb_admin, self.get_users_domain_dn("testuser4"))
delete_force(self.ldb_admin, self.get_users_domain_dn("testuser5"))
delete_force(self.ldb_admin, self.get_users_domain_dn("testuser6"))
delete_force(self.ldb_admin, self.get_users_domain_dn("testuser7"))
delete_force(self.ldb_admin, self.get_users_domain_dn("testuser8"))
# DOMAIN
delete_force(self.ldb_admin, self.get_users_domain_dn("test_domain_group1"))
delete_force(self.ldb_admin, "CN=test_domain_user1,OU=test_domain_ou1," + self.base_dn)
delete_force(self.ldb_admin, "OU=test_domain_ou2,OU=test_domain_ou1," + self.base_dn)
delete_force(self.ldb_admin, "OU=test_domain_ou1," + self.base_dn)
# SCHEMA
# CONFIGURATION
delete_force(self.ldb_admin, "CN=test-specifier1,CN=test-container1,CN=DisplaySpecifiers," \
+ self.configuration_dn)
delete_force(self.ldb_admin, "CN=test-container1,CN=DisplaySpecifiers," + self.configuration_dn)
    def setUp(self):
        """Create eight test users covering every combination of the three
        admin groups, then build the table of expected owner/group SDDL
        prefixes, keyed by test number and domain behaviour level.

        The ``self.results`` table maps test-method number (e.g. "100" for
        test_100) to the "O:...G:..." prefix the created object's security
        descriptor must carry; "%s" placeholders are filled with the creating
        user's SID by the individual tests.
        """
        super(OwnerGroupDescriptorTests, self).setUp()
        self.deleteAll()
        ### Create users
        # User 1 - Enterprise Admins
        self.ldb_admin.newuser("testuser1", "samba123@")
        # User 2 - Domain Admins
        self.ldb_admin.newuser("testuser2", "samba123@")
        # User 3 - Schema Admins
        self.ldb_admin.newuser("testuser3", "samba123@")
        # User 4 - regular user
        self.ldb_admin.newuser("testuser4", "samba123@")
        # User 5 - Enterprise Admins and Domain Admins
        self.ldb_admin.newuser("testuser5", "samba123@")
        # User 6 - Enterprise Admins, Domain Admins, Schema Admins
        self.ldb_admin.newuser("testuser6", "samba123@")
        # User 7 - Domain Admins and Schema Admins
        self.ldb_admin.newuser("testuser7", "samba123@")
        # User 8 - Enterprise Admins and Schema Admins
        self.ldb_admin.newuser("testuser8", "samba123@")
        self.ldb_admin.add_remove_group_members("Enterprise Admins",
                                                "testuser1,testuser5,testuser6,testuser8",
                                                add_members_operation=True)
        self.ldb_admin.add_remove_group_members("Domain Admins",
                                                "testuser2,testuser5,testuser6,testuser7",
                                                add_members_operation=True)
        self.ldb_admin.add_remove_group_members("Schema Admins",
                                                "testuser3,testuser6,testuser7,testuser8",
                                                add_members_operation=True)
        # Expected owner/group SDDL prefixes per test number.  Pre-2008
        # functional levels default the group to "Domain Users" (DU); 2008+
        # levels default owner and group to the creator's primary admin group.
        self.results = {
            # msDS-Behavior-Version < DS_DOMAIN_FUNCTION_2008
            "ds_behavior_win2003" : {
                "100" : "O:EAG:DU",
                "101" : "O:DAG:DU",
                "102" : "O:%sG:DU",
                "103" : "O:%sG:DU",
                "104" : "O:DAG:DU",
                "105" : "O:DAG:DU",
                "106" : "O:DAG:DU",
                "107" : "O:EAG:DU",
                "108" : "O:DAG:DA",
                "109" : "O:DAG:DA",
                "110" : "O:%sG:DA",
                "111" : "O:%sG:DA",
                "112" : "O:DAG:DA",
                "113" : "O:DAG:DA",
                "114" : "O:DAG:DA",
                "115" : "O:DAG:DA",
                "130" : "O:EAG:DU",
                "131" : "O:DAG:DU",
                "132" : "O:SAG:DU",
                "133" : "O:%sG:DU",
                "134" : "O:EAG:DU",
                "135" : "O:SAG:DU",
                "136" : "O:SAG:DU",
                "137" : "O:SAG:DU",
                "138" : "O:DAG:DA",
                "139" : "O:DAG:DA",
                "140" : "O:%sG:DA",
                "141" : "O:%sG:DA",
                "142" : "O:DAG:DA",
                "143" : "O:DAG:DA",
                "144" : "O:DAG:DA",
                "145" : "O:DAG:DA",
                "160" : "O:EAG:DU",
                "161" : "O:DAG:DU",
                "162" : "O:%sG:DU",
                "163" : "O:%sG:DU",
                "164" : "O:EAG:DU",
                "165" : "O:EAG:DU",
                "166" : "O:DAG:DU",
                "167" : "O:EAG:DU",
                "168" : "O:DAG:DA",
                "169" : "O:DAG:DA",
                "170" : "O:%sG:DA",
                "171" : "O:%sG:DA",
                "172" : "O:DAG:DA",
                "173" : "O:DAG:DA",
                "174" : "O:DAG:DA",
                "175" : "O:DAG:DA",
            },
            # msDS-Behavior-Version >= DS_DOMAIN_FUNCTION_2008
            "ds_behavior_win2008" : {
                "100" : "O:EAG:EA",
                "101" : "O:DAG:DA",
                "102" : "O:%sG:DU",
                "103" : "O:%sG:DU",
                "104" : "O:DAG:DA",
                "105" : "O:DAG:DA",
                "106" : "O:DAG:DA",
                "107" : "O:EAG:EA",
                "108" : "O:DAG:DA",
                "109" : "O:DAG:DA",
                "110" : "O:%sG:DA",
                "111" : "O:%sG:DA",
                "112" : "O:DAG:DA",
                "113" : "O:DAG:DA",
                "114" : "O:DAG:DA",
                "115" : "O:DAG:DA",
                "130" : "O:EAG:EA",
                "131" : "O:DAG:DA",
                "132" : "O:SAG:SA",
                "133" : "O:%sG:DU",
                "134" : "O:EAG:EA",
                "135" : "O:SAG:SA",
                "136" : "O:SAG:SA",
                "137" : "O:SAG:SA",
                "138" : "",
                "139" : "",
                "140" : "O:%sG:DA",
                "141" : "O:%sG:DA",
                "142" : "",
                "143" : "",
                "144" : "",
                "145" : "",
                "160" : "O:EAG:EA",
                "161" : "O:DAG:DA",
                "162" : "O:%sG:DU",
                "163" : "O:%sG:DU",
                "164" : "O:EAG:EA",
                "165" : "O:EAG:EA",
                "166" : "O:DAG:DA",
                "167" : "O:EAG:EA",
                "168" : "O:DAG:DA",
                "169" : "O:DAG:DA",
                "170" : "O:%sG:DA",
                "171" : "O:%sG:DA",
                "172" : "O:DAG:DA",
                "173" : "O:DAG:DA",
                "174" : "O:DAG:DA",
                "175" : "O:DAG:DA",
            },
        }
        # Discover 'msDS-Behavior-Version' and pick the matching column of
        # the expectation table.
        res = self.ldb_admin.search(base=self.base_dn, expression="distinguishedName=%s" % self.base_dn, \
                attrs=['msDS-Behavior-Version'])
        res = int(res[0]['msDS-Behavior-Version'][0])
        if res < DS_DOMAIN_FUNCTION_2008:
            self.DS_BEHAVIOR = "ds_behavior_win2003"
        else:
            self.DS_BEHAVIOR = "ds_behavior_win2008"
def tearDown(self):
super(DescriptorTests, self).tearDown()
self.deleteAll()
def check_user_belongs(self, user_dn, groups=[]):
""" Test wether user is member of the expected group(s) """
if groups != []:
# User is member of at least one additional group
res = self.ldb_admin.search(user_dn, attrs=["memberOf"])
res = [x.upper() for x in sorted(list(res[0]["memberOf"]))]
expected = []
for x in groups:
expected.append(self.get_users_domain_dn(x))
expected = [x.upper() for x in sorted(expected)]
self.assertEqual(expected, res)
else:
# User is not a member of any additional groups but default
res = self.ldb_admin.search(user_dn, attrs=["*"])
res = [x.upper() for x in res[0].keys()]
self.assertFalse( "MEMBEROF" in res)
def check_modify_inheritance(self, _ldb, object_dn, owner_group=""):
# Modify
sd_user_utils = sd_utils.SDUtils(_ldb)
ace = "(D;;CC;;;LG)" # Deny Create Children to Guest account
if owner_group != "":
sd_user_utils.modify_sd_on_dn(object_dn, owner_group + "D:" + ace)
else:
sd_user_utils.modify_sd_on_dn(object_dn, "D:" + ace)
# Make sure the modify operation has been applied
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
self.assertTrue(ace in desc_sddl)
# Make sure we have identical result for both "add" and "modify"
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
print self._testMethodName
test_number = self._testMethodName[5:]
self.assertEqual(self.results[self.DS_BEHAVIOR][test_number], res)
def test_100(self):
""" Enterprise admin group member creates object (default nTSecurityDescriptor) in DOMAIN
"""
user_name = "testuser1"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Enterprise Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
object_dn = "CN=test_domain_group1,CN=Users," + self.base_dn
delete_force(self.ldb_admin, object_dn)
_ldb.newgroup("test_domain_group1", grouptype=4)
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]], res)
self.check_modify_inheritance(_ldb, object_dn)
def test_101(self):
""" Domain admin group member creates object (default nTSecurityDescriptor) in DOMAIN
"""
user_name = "testuser2"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Domain Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
object_dn = "CN=test_domain_group1,CN=Users," + self.base_dn
delete_force(self.ldb_admin, object_dn)
_ldb.newgroup("test_domain_group1", grouptype=4)
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]], res)
self.check_modify_inheritance(_ldb, object_dn)
def test_102(self):
""" Schema admin group member with CC right creates object (default nTSecurityDescriptor) in DOMAIN
"""
user_name = "testuser3"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Schema Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
object_dn = "OU=test_domain_ou1," + self.base_dn
delete_force(self.ldb_admin, object_dn)
self.ldb_admin.create_ou(object_dn)
user_sid = self.sd_utils.get_object_sid( self.get_users_domain_dn(user_name) )
mod = "(A;CI;WPWDCC;;;%s)" % str(user_sid)
self.sd_utils.dacl_add_ace(object_dn, mod)
# Create additional object into the first one
object_dn = "CN=test_domain_user1," + object_dn
delete_force(self.ldb_admin, object_dn)
_ldb.newuser("test_domain_user1", "samba123@",
userou="OU=test_domain_ou1", setpassword=False)
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]] % str(user_sid), res)
# This fails, research why
#self.check_modify_inheritance(_ldb, object_dn)
def test_103(self):
""" Regular user with CC right creates object (default nTSecurityDescriptor) in DOMAIN
"""
user_name = "testuser4"
self.check_user_belongs(self.get_users_domain_dn(user_name), [])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
object_dn = "OU=test_domain_ou1," + self.base_dn
delete_force(self.ldb_admin, object_dn)
self.ldb_admin.create_ou(object_dn)
user_sid = self.sd_utils.get_object_sid( self.get_users_domain_dn(user_name) )
mod = "(A;CI;WPWDCC;;;%s)" % str(user_sid)
self.sd_utils.dacl_add_ace(object_dn, mod)
# Create additional object into the first one
object_dn = "CN=test_domain_user1," + object_dn
delete_force(self.ldb_admin, object_dn)
_ldb.newuser("test_domain_user1", "samba123@",
userou="OU=test_domain_ou1", setpassword=False)
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]] % str(user_sid), res)
#this fails, research why
#self.check_modify_inheritance(_ldb, object_dn)
def test_104(self):
""" Enterprise & Domain admin group member creates object (default nTSecurityDescriptor) in DOMAIN
"""
user_name = "testuser5"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Enterprise Admins", "Domain Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
object_dn = "CN=test_domain_group1,CN=Users," + self.base_dn
delete_force(self.ldb_admin, object_dn)
_ldb.newgroup("test_domain_group1", grouptype=4)
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]], res)
self.check_modify_inheritance(_ldb, object_dn)
def test_105(self):
""" Enterprise & Domain & Schema admin group member creates object (default nTSecurityDescriptor) in DOMAIN
"""
user_name = "testuser6"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Enterprise Admins", "Domain Admins", "Schema Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
object_dn = "CN=test_domain_group1,CN=Users," + self.base_dn
delete_force(self.ldb_admin, object_dn)
_ldb.newgroup("test_domain_group1", grouptype=4)
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]], res)
self.check_modify_inheritance(_ldb, object_dn)
def test_106(self):
""" Domain & Schema admin group member creates object (default nTSecurityDescriptor) in DOMAIN
"""
user_name = "testuser7"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Domain Admins", "Schema Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
object_dn = "CN=test_domain_group1,CN=Users," + self.base_dn
delete_force(self.ldb_admin, object_dn)
_ldb.newgroup("test_domain_group1", grouptype=4)
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]], res)
self.check_modify_inheritance(_ldb, object_dn)
def test_107(self):
""" Enterprise & Schema admin group member creates object (default nTSecurityDescriptor) in DOMAIN
"""
user_name = "testuser8"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Enterprise Admins", "Schema Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
object_dn = "CN=test_domain_group1,CN=Users," + self.base_dn
delete_force(self.ldb_admin, object_dn)
_ldb.newgroup("test_domain_group1", grouptype=4)
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]], res)
self.check_modify_inheritance(_ldb, object_dn)
# Control descriptor tests #####################################################################
def test_108(self):
""" Enterprise admin group member creates object (custom descriptor) in DOMAIN
"""
user_name = "testuser1"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Enterprise Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
object_dn = "CN=test_domain_group1,CN=Users," + self.base_dn
delete_force(self.ldb_admin, object_dn)
# Create a custom security descriptor
sddl = "O:DAG:DAD:(A;;RP;;;DU)"
tmp_desc = security.descriptor.from_sddl(sddl, self.domain_sid)
_ldb.newgroup("test_domain_group1", grouptype=4, sd=tmp_desc)
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]], res)
def test_109(self):
""" Domain admin group member creates object (custom descriptor) in DOMAIN
"""
user_name = "testuser2"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Domain Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
object_dn = "CN=test_domain_group1,CN=Users," + self.base_dn
delete_force(self.ldb_admin, object_dn)
# Create a custom security descriptor
sddl = "O:DAG:DAD:(A;;RP;;;DU)"
tmp_desc = security.descriptor.from_sddl(sddl, self.domain_sid)
_ldb.newgroup("test_domain_group1", grouptype=4, sd=tmp_desc)
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]], res)
def test_110(self):
""" Schema admin group member with CC right creates object (custom descriptor) in DOMAIN
"""
user_name = "testuser3"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Schema Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
object_dn = "OU=test_domain_ou1," + self.base_dn
delete_force(self.ldb_admin, object_dn)
self.ldb_admin.create_ou(object_dn)
user_sid = self.sd_utils.get_object_sid( self.get_users_domain_dn(user_name) )
mod = "(A;CI;WOWDCC;;;%s)" % str(user_sid)
self.sd_utils.dacl_add_ace(object_dn, mod)
# Create a custom security descriptor
# NB! Problematic owner part won't accept DA only <User Sid> !!!
sddl = "O:%sG:DAD:(A;;RP;;;DU)" % str(user_sid)
tmp_desc = security.descriptor.from_sddl(sddl, self.domain_sid)
# Create additional object into the first one
object_dn = "CN=test_domain_user1," + object_dn
delete_force(self.ldb_admin, object_dn)
_ldb.newuser("test_domain_user1", "samba123@",
userou="OU=test_domain_ou1", sd=tmp_desc, setpassword=False)
desc = self.sd_utils.read_sd_on_dn(object_dn)
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]] % str(user_sid), res)
def test_111(self):
""" Regular user with CC right creates object (custom descriptor) in DOMAIN
"""
user_name = "testuser4"
self.check_user_belongs(self.get_users_domain_dn(user_name), [])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
object_dn = "OU=test_domain_ou1," + self.base_dn
delete_force(self.ldb_admin, object_dn)
self.ldb_admin.create_ou(object_dn)
user_sid = self.sd_utils.get_object_sid( self.get_users_domain_dn(user_name) )
mod = "(A;CI;WOWDCC;;;%s)" % str(user_sid)
self.sd_utils.dacl_add_ace(object_dn, mod)
# Create a custom security descriptor
# NB! Problematic owner part won't accept DA only <User Sid> !!!
sddl = "O:%sG:DAD:(A;;RP;;;DU)" % str(user_sid)
tmp_desc = security.descriptor.from_sddl(sddl, self.domain_sid)
# Create additional object into the first one
object_dn = "CN=test_domain_user1," + object_dn
delete_force(self.ldb_admin, object_dn)
_ldb.newuser("test_domain_user1", "samba123@",
userou="OU=test_domain_ou1", sd=tmp_desc, setpassword=False)
desc = self.sd_utils.read_sd_on_dn(object_dn)
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]] % str(user_sid), res)
def test_112(self):
""" Domain & Enterprise admin group member creates object (custom descriptor) in DOMAIN
"""
user_name = "testuser5"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Enterprise Admins", "Domain Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
object_dn = "CN=test_domain_group1,CN=Users," + self.base_dn
delete_force(self.ldb_admin, object_dn)
# Create a custom security descriptor
sddl = "O:DAG:DAD:(A;;RP;;;DU)"
tmp_desc = security.descriptor.from_sddl(sddl, self.domain_sid)
_ldb.newgroup("test_domain_group1", grouptype=4, sd=tmp_desc)
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]], res)
def test_113(self):
""" Domain & Enterprise & Schema admin group member creates object (custom descriptor) in DOMAIN
"""
user_name = "testuser6"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Enterprise Admins", "Domain Admins", "Schema Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
object_dn = "CN=test_domain_group1,CN=Users," + self.base_dn
delete_force(self.ldb_admin, object_dn)
# Create a custom security descriptor
sddl = "O:DAG:DAD:(A;;RP;;;DU)"
tmp_desc = security.descriptor.from_sddl(sddl, self.domain_sid)
_ldb.newgroup("test_domain_group1", grouptype=4, sd=tmp_desc)
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]], res)
def test_114(self):
""" Domain & Schema admin group member creates object (custom descriptor) in DOMAIN
"""
user_name = "testuser7"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Domain Admins", "Schema Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
object_dn = "CN=test_domain_group1,CN=Users," + self.base_dn
delete_force(self.ldb_admin, object_dn)
# Create a custom security descriptor
sddl = "O:DAG:DAD:(A;;RP;;;DU)"
tmp_desc = security.descriptor.from_sddl(sddl, self.domain_sid)
_ldb.newgroup("test_domain_group1", grouptype=4, sd=tmp_desc)
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]], res)
def test_115(self):
""" Enterprise & Schema admin group member creates object (custom descriptor) in DOMAIN
"""
user_name = "testuser8"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Enterprise Admins", "Schema Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
object_dn = "CN=test_domain_group1,CN=Users," + self.base_dn
delete_force(self.ldb_admin, object_dn)
# Create a custom security descriptor
sddl = "O:DAG:DAD:(A;;RP;;;DU)"
tmp_desc = security.descriptor.from_sddl(sddl, self.domain_sid)
_ldb.newgroup("test_domain_group1", grouptype=4, sd=tmp_desc)
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]], res)
def test_999(self):
user_name = "Administrator"
object_dn = "OU=test_domain_ou1," + self.base_dn
delete_force(self.ldb_admin, object_dn)
self.ldb_admin.create_ou(object_dn)
user_sid = self.sd_utils.get_object_sid( self.get_users_domain_dn(user_name) )
mod = "(D;CI;WP;;;S-1-3-0)"
#mod = ""
self.sd_utils.dacl_add_ace(object_dn, mod)
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
# Create additional object into the first one
object_dn = "OU=test_domain_ou2," + object_dn
delete_force(self.ldb_admin, object_dn)
self.ldb_admin.create_ou(object_dn)
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
## Tests for SCHEMA
    # Default descriptor tests #################################################################
def test_130(self):
user_name = "testuser1"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Enterprise Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
# Change Schema partition descriptor
user_sid = self.sd_utils.get_object_sid( self.get_users_domain_dn(user_name) )
mod = "(A;;WDCC;;;AU)"
self.sd_utils.dacl_add_ace(self.schema_dn, mod)
# Create example Schema class
class_name = self.get_unique_schema_class_name()
class_dn = "CN=%s,%s" % (class_name, self.schema_dn)
self.create_schema_class(_ldb, class_dn)
desc_sddl = self.sd_utils.get_sd_as_sddl(class_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]], res)
self.check_modify_inheritance(_ldb, class_dn)
def test_131(self):
user_name = "testuser2"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Domain Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
# Change Schema partition descriptor
mod = "(A;CI;WDCC;;;AU)"
self.sd_utils.dacl_add_ace(self.schema_dn, mod)
# Create example Schema class
class_name = self.get_unique_schema_class_name()
class_dn = "CN=%s,%s" % (class_name, self.schema_dn)
self.create_schema_class(_ldb, class_dn)
desc_sddl = self.sd_utils.get_sd_as_sddl(class_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]], res)
self.check_modify_inheritance(_ldb, class_dn)
def test_132(self):
user_name = "testuser3"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Schema Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
# Change Schema partition descriptor
mod = "(A;CI;WDCC;;;AU)"
self.sd_utils.dacl_add_ace(self.schema_dn, mod)
# Create example Schema class
class_name = self.get_unique_schema_class_name()
class_dn = "CN=%s,%s" % (class_name, self.schema_dn)
self.create_schema_class(_ldb, class_dn)
desc_sddl = self.sd_utils.get_sd_as_sddl(class_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]], res)
#self.check_modify_inheritance(_ldb, class_dn)
def test_133(self):
user_name = "testuser4"
self.check_user_belongs(self.get_users_domain_dn(user_name), [])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
#Change Schema partition descriptor
user_sid = self.sd_utils.get_object_sid( self.get_users_domain_dn(user_name) )
mod = "(A;CI;WDCC;;;AU)"
self.sd_utils.dacl_add_ace(self.schema_dn, mod)
# Create example Schema class
class_name = self.get_unique_schema_class_name()
class_dn = "CN=%s,%s" % (class_name, self.schema_dn)
self.create_schema_class(_ldb, class_dn)
desc_sddl = self.sd_utils.get_sd_as_sddl(class_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]] % str(user_sid), res)
#self.check_modify_inheritance(_ldb, class_dn)
def test_134(self):
user_name = "testuser5"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Enterprise Admins", "Domain Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
#Change Schema partition descriptor
mod = "(A;CI;WDCC;;;AU)"
self.sd_utils.dacl_add_ace(self.schema_dn, mod)
# Create example Schema class
class_name = self.get_unique_schema_class_name()
class_dn = "CN=%s,%s" % (class_name, self.schema_dn)
self.create_schema_class(_ldb, class_dn)
desc_sddl = self.sd_utils.get_sd_as_sddl(class_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]], res)
self.check_modify_inheritance(_ldb, class_dn)
def test_135(self):
user_name = "testuser6"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Enterprise Admins", "Domain Admins", "Schema Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
# Change Schema partition descriptor
mod = "(A;CI;WDCC;;;AU)"
self.sd_utils.dacl_add_ace(self.schema_dn, mod)
# Create example Schema class
class_name = self.get_unique_schema_class_name()
class_dn = "CN=%s,%s" % (class_name, self.schema_dn)
self.create_schema_class(_ldb, class_dn)
desc_sddl = self.sd_utils.get_sd_as_sddl(class_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]], res)
self.check_modify_inheritance(_ldb, class_dn)
def test_136(self):
user_name = "testuser7"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Domain Admins", "Schema Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
# Change Schema partition descriptor
mod = "(A;CI;WDCC;;;AU)"
self.sd_utils.dacl_add_ace(self.schema_dn, mod)
# Create example Schema class
class_name = self.get_unique_schema_class_name()
class_dn = "CN=%s,%s" % (class_name, self.schema_dn)
self.create_schema_class(_ldb, class_dn)
desc_sddl = self.sd_utils.get_sd_as_sddl(class_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]], res)
self.check_modify_inheritance(_ldb, class_dn)
def test_137(self):
user_name = "testuser8"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Enterprise Admins", "Schema Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
# Change Schema partition descriptor
mod = "(A;CI;WDCC;;;AU)"
self.sd_utils.dacl_add_ace(self.schema_dn, mod)
# Create example Schema class
class_name = self.get_unique_schema_class_name()
class_dn = "CN=%s,%s" % (class_name, self.schema_dn)
self.create_schema_class(_ldb, class_dn)
desc_sddl = self.sd_utils.get_sd_as_sddl(class_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]], res)
self.check_modify_inheritance(_ldb, class_dn)
# Custom descriptor tests ##################################################################
def test_138(self):
user_name = "testuser1"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Enterprise Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
# Change Schema partition descriptor
mod = "(A;;CC;;;AU)"
self.sd_utils.dacl_add_ace(self.schema_dn, mod)
# Create a custom security descriptor
desc_sddl = "O:DAG:DAD:(A;;RP;;;DU)"
# Create example Schema class
class_name = self.get_unique_schema_class_name()
class_dn = "CN=%s,%s" % (class_name, self.schema_dn)
self.create_schema_class(_ldb, class_dn, desc_sddl)
desc_sddl = self.sd_utils.get_sd_as_sddl(class_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual("O:DAG:DA", res)
def test_139(self):
user_name = "testuser2"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Domain Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
# Change Schema partition descriptor
mod = "(A;;CC;;;AU)"
self.sd_utils.dacl_add_ace(self.schema_dn, mod)
# Create a custom security descriptor
desc_sddl = "O:DAG:DAD:(A;;RP;;;DU)"
# Create example Schema class
class_name = self.get_unique_schema_class_name()
class_dn = "CN=%s,%s" % (class_name, self.schema_dn)
self.create_schema_class(_ldb, class_dn, desc_sddl)
desc_sddl = self.sd_utils.get_sd_as_sddl(class_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual("O:DAG:DA", res)
def test_140(self):
user_name = "testuser3"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Schema Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
# Create a custom security descriptor
# NB! Problematic owner part won't accept DA only <User Sid> !!!
user_sid = self.sd_utils.get_object_sid( self.get_users_domain_dn(user_name) )
desc_sddl = "O:%sG:DAD:(A;;RP;;;DU)" % str(user_sid)
# Create example Schema class
class_name = self.get_unique_schema_class_name()
class_dn = "CN=%s,%s" % (class_name, self.schema_dn)
self.create_schema_class(_ldb, class_dn, desc_sddl)
desc_sddl = self.sd_utils.get_sd_as_sddl(class_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]] % str(user_sid), res)
def test_141(self):
user_name = "testuser4"
self.check_user_belongs(self.get_users_domain_dn(user_name), [])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
# Create a custom security descriptor
# NB! Problematic owner part won't accept DA only <User Sid> !!!
user_sid = self.sd_utils.get_object_sid( self.get_users_domain_dn(user_name) )
desc_sddl = "O:%sG:DAD:(A;;RP;;;DU)" % str(user_sid)
# Create example Schema class
class_name = self.get_unique_schema_class_name()
class_dn = "CN=%s,%s" % (class_name, self.schema_dn)
self.create_schema_class(_ldb, class_dn, desc_sddl)
desc_sddl = self.sd_utils.get_sd_as_sddl(class_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]] % str(user_sid), res)
def test_142(self):
user_name = "testuser5"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Enterprise Admins", "Domain Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
# Change Schema partition descriptor
mod = "(A;;CC;;;AU)"
self.sd_utils.dacl_add_ace(self.schema_dn, mod)
# Create a custom security descriptor
desc_sddl = "O:DAG:DAD:(A;;RP;;;DU)"
# Create example Schema class
class_name = self.get_unique_schema_class_name()
class_dn = "CN=%s,%s" % (class_name, self.schema_dn)
self.create_schema_class(_ldb, class_dn, desc_sddl)
desc_sddl = self.sd_utils.get_sd_as_sddl(class_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual("O:DAG:DA", res)
def test_143(self):
user_name = "testuser6"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Enterprise Admins", "Domain Admins", "Schema Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
# Change Schema partition descriptor
mod = "(A;;CC;;;AU)"
self.sd_utils.dacl_add_ace(self.schema_dn, mod)
# Create a custom security descriptor
desc_sddl = "O:DAG:DAD:(A;;RP;;;DU)"
# Create example Schema class
class_name = self.get_unique_schema_class_name()
class_dn = "CN=%s,%s" % (class_name, self.schema_dn)
self.create_schema_class(_ldb, class_dn, desc_sddl)
desc_sddl = self.sd_utils.get_sd_as_sddl(class_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual("O:DAG:DA", res)
def test_144(self):
user_name = "testuser7"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Domain Admins", "Schema Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
# Change Schema partition descriptor
mod = "(A;;CC;;;AU)"
self.sd_utils.dacl_add_ace(self.schema_dn, mod)
# Create a custom security descriptor
desc_sddl = "O:DAG:DAD:(A;;RP;;;DU)"
# Create example Schema class
class_name = self.get_unique_schema_class_name()
class_dn = "CN=%s,%s" % (class_name, self.schema_dn)
self.create_schema_class(_ldb, class_dn, desc_sddl)
desc_sddl = self.sd_utils.get_sd_as_sddl(class_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual("O:DAG:DA", res)
def test_145(self):
user_name = "testuser8"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Enterprise Admins", "Schema Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
# Change Schema partition descriptor
mod = "(A;;CC;;;AU)"
self.sd_utils.dacl_add_ace(self.schema_dn, mod)
# Create a custom security descriptor
desc_sddl = "O:DAG:DAD:(A;;RP;;;DU)"
# Create example Schema class
class_name = self.get_unique_schema_class_name()
class_dn = "CN=%s,%s" % (class_name, self.schema_dn)
self.create_schema_class(_ldb, class_dn, desc_sddl)
desc_sddl = self.sd_utils.get_sd_as_sddl(class_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual("O:DAG:DA", res)
## Tests for CONFIGURATION
    # Default descriptor tests #################################################################
def test_160(self):
user_name = "testuser1"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Enterprise Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
# Create example Configuration container
container_name = "test-container1"
object_dn = "CN=%s,CN=DisplaySpecifiers,%s" % (container_name, self.configuration_dn)
delete_force(self.ldb_admin, object_dn)
self.create_configuration_container(_ldb, object_dn, )
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]], res)
self.check_modify_inheritance(_ldb, object_dn)
def test_161(self):
user_name = "testuser2"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Domain Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
# Create example Configuration container
container_name = "test-container1"
object_dn = "CN=%s,CN=DisplaySpecifiers,%s" % (container_name, self.configuration_dn)
delete_force(self.ldb_admin, object_dn)
self.create_configuration_container(_ldb, object_dn, )
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]], res)
self.check_modify_inheritance(_ldb, object_dn)
def test_162(self):
user_name = "testuser3"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Schema Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
# Create example Configuration container
object_dn = "CN=test-container1,CN=DisplaySpecifiers," + self.configuration_dn
delete_force(self.ldb_admin, object_dn)
self.create_configuration_container(self.ldb_admin, object_dn, )
user_sid = self.sd_utils.get_object_sid( self.get_users_domain_dn(user_name) )
mod = "(A;;WDCC;;;AU)"
self.sd_utils.dacl_add_ace(object_dn, mod)
# Create child object with user's credentials
object_dn = "CN=test-specifier1," + object_dn
delete_force(self.ldb_admin, object_dn)
self.create_configuration_specifier(_ldb, object_dn)
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]] % str(user_sid), res)
#self.check_modify_inheritance(_ldb, object_dn)
def test_163(self):
user_name = "testuser4"
self.check_user_belongs(self.get_users_domain_dn(user_name), [])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
# Create example Configuration container
object_dn = "CN=test-container1,CN=DisplaySpecifiers," + self.configuration_dn
delete_force(self.ldb_admin, object_dn)
self.create_configuration_container(self.ldb_admin, object_dn, )
user_sid = self.sd_utils.get_object_sid( self.get_users_domain_dn(user_name) )
mod = "(A;CI;WDCC;;;AU)"
self.sd_utils.dacl_add_ace(object_dn, mod)
# Create child object with user's credentials
object_dn = "CN=test-specifier1," + object_dn
delete_force(self.ldb_admin, object_dn)
self.create_configuration_specifier(_ldb, object_dn)
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]] % str(user_sid), res)
#self.check_modify_inheritance(_ldb, object_dn)
def test_164(self):
user_name = "testuser5"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Enterprise Admins", "Domain Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
# Create example Configuration container
container_name = "test-container1"
object_dn = "CN=%s,CN=DisplaySpecifiers,%s" % (container_name, self.configuration_dn)
delete_force(self.ldb_admin, object_dn)
self.create_configuration_container(_ldb, object_dn, )
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]], res)
self.check_modify_inheritance(_ldb, object_dn)
def test_165(self):
user_name = "testuser6"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Enterprise Admins", "Domain Admins", "Schema Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
# Create example Configuration container
container_name = "test-container1"
object_dn = "CN=%s,CN=DisplaySpecifiers,%s" % (container_name, self.configuration_dn)
delete_force(self.ldb_admin, object_dn)
self.create_configuration_container(_ldb, object_dn, )
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]], res)
self.check_modify_inheritance(_ldb, object_dn)
def test_166(self):
user_name = "testuser7"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Domain Admins", "Schema Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
# Create example Configuration container
container_name = "test-container1"
object_dn = "CN=%s,CN=DisplaySpecifiers,%s" % (container_name, self.configuration_dn)
delete_force(self.ldb_admin, object_dn)
self.create_configuration_container(_ldb, object_dn, )
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]], res)
self.check_modify_inheritance(_ldb, object_dn)
def test_167(self):
user_name = "testuser8"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Enterprise Admins", "Schema Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
# Create example Configuration container
container_name = "test-container1"
object_dn = "CN=%s,CN=DisplaySpecifiers,%s" % (container_name, self.configuration_dn)
delete_force(self.ldb_admin, object_dn)
self.create_configuration_container(_ldb, object_dn, )
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]], res)
self.check_modify_inheritance(_ldb, object_dn)
# Custom descriptor tests ##################################################################
def test_168(self):
user_name = "testuser1"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Enterprise Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
# Create example Configuration container
container_name = "test-container1"
object_dn = "CN=%s,CN=DisplaySpecifiers,%s" % (container_name, self.configuration_dn)
delete_force(self.ldb_admin, object_dn)
# Create a custom security descriptor
desc_sddl = "O:DAG:DAD:(A;;RP;;;DU)"
self.create_configuration_container(_ldb, object_dn, desc_sddl)
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual("O:DAG:DA", res)
def test_169(self):
user_name = "testuser2"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Domain Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
# Create example Configuration container
container_name = "test-container1"
object_dn = "CN=%s,CN=DisplaySpecifiers,%s" % (container_name, self.configuration_dn)
delete_force(self.ldb_admin, object_dn)
# Create a custom security descriptor
desc_sddl = "O:DAG:DAD:(A;;RP;;;DU)"
self.create_configuration_container(_ldb, object_dn, desc_sddl)
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual("O:DAG:DA", res)
def test_170(self):
user_name = "testuser3"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Schema Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
# Create example Configuration container
object_dn = "CN=test-container1,CN=DisplaySpecifiers," + self.configuration_dn
delete_force(self.ldb_admin, object_dn)
self.create_configuration_container(self.ldb_admin, object_dn, )
user_sid = self.sd_utils.get_object_sid( self.get_users_domain_dn(user_name) )
mod = "(A;;CC;;;AU)"
self.sd_utils.dacl_add_ace(object_dn, mod)
# Create child object with user's credentials
object_dn = "CN=test-specifier1," + object_dn
delete_force(self.ldb_admin, object_dn)
# Create a custom security descriptor
# NB! Problematic owner part won't accept DA only <User Sid> !!!
desc_sddl = "O:%sG:DAD:(A;;RP;;;DU)" % str(user_sid)
self.create_configuration_specifier(_ldb, object_dn, desc_sddl)
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]] % str(user_sid), res)
def test_171(self):
user_name = "testuser4"
self.check_user_belongs(self.get_users_domain_dn(user_name), [])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
# Create example Configuration container
object_dn = "CN=test-container1,CN=DisplaySpecifiers," + self.configuration_dn
delete_force(self.ldb_admin, object_dn)
self.create_configuration_container(self.ldb_admin, object_dn, )
user_sid = self.sd_utils.get_object_sid( self.get_users_domain_dn(user_name) )
mod = "(A;;CC;;;AU)"
self.sd_utils.dacl_add_ace(object_dn, mod)
# Create child object with user's credentials
object_dn = "CN=test-specifier1," + object_dn
delete_force(self.ldb_admin, object_dn)
# Create a custom security descriptor
# NB! Problematic owner part won't accept DA only <User Sid> !!!
desc_sddl = "O:%sG:DAD:(A;;RP;;;DU)" % str(user_sid)
self.create_configuration_specifier(_ldb, object_dn, desc_sddl)
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual(self.results[self.DS_BEHAVIOR][self._testMethodName[5:]] % str(user_sid), res)
def test_172(self):
user_name = "testuser5"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Enterprise Admins", "Domain Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
# Create example Configuration container
container_name = "test-container1"
object_dn = "CN=%s,CN=DisplaySpecifiers,%s" % (container_name, self.configuration_dn)
delete_force(self.ldb_admin, object_dn)
# Create a custom security descriptor
desc_sddl = "O:DAG:DAD:(A;;RP;;;DU)"
self.create_configuration_container(_ldb, object_dn, desc_sddl)
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual("O:DAG:DA", res)
def test_173(self):
user_name = "testuser6"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Enterprise Admins", "Domain Admins", "Schema Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
# Create example Configuration container
container_name = "test-container1"
object_dn = "CN=%s,CN=DisplaySpecifiers,%s" % (container_name, self.configuration_dn)
delete_force(self.ldb_admin, object_dn)
# Create a custom security descriptor
desc_sddl = "O:DAG:DAD:(A;;RP;;;DU)"
self.create_configuration_container(_ldb, object_dn, desc_sddl)
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual("O:DAG:DA", res)
def test_174(self):
user_name = "testuser7"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Domain Admins", "Schema Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
# Create example Configuration container
container_name = "test-container1"
object_dn = "CN=%s,CN=DisplaySpecifiers,%s" % (container_name, self.configuration_dn)
delete_force(self.ldb_admin, object_dn)
# Create a custom security descriptor
desc_sddl = "O:DAG:DAD:(A;;RP;;;DU)"
self.create_configuration_container(_ldb, object_dn, desc_sddl)
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual("O:DAG:DA", res)
def test_175(self):
user_name = "testuser8"
self.check_user_belongs(self.get_users_domain_dn(user_name), ["Enterprise Admins", "Schema Admins"])
# Open Ldb connection with the tested user
_ldb = self.get_ldb_connection(user_name, "samba123@")
# Create example Configuration container
container_name = "test-container1"
object_dn = "CN=%s,CN=DisplaySpecifiers,%s" % (container_name, self.configuration_dn)
delete_force(self.ldb_admin, object_dn)
# Create a custom security descriptor
desc_sddl = "O:DAG:DAD:(A;;RP;;;DU)"
self.create_configuration_container(_ldb, object_dn, desc_sddl)
desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
res = re.search("(O:.*G:.*?)D:", desc_sddl).group(1)
self.assertEqual("O:DAG:DA", res)
########################################################################################
# Inheritance tests for DACL
class DaclDescriptorTests(DescriptorTests):
    """DACL inheritance tests.

    Each test builds an OU (usually stripped of inheritable ACEs and marked
    'protected' via create_clean_ou), creates child objects under it, and
    checks how inheritable ACEs, the CO (Creator Owner) SID and the
    CI/OI/IO/ID SDDL flags show up in the children's security descriptors.
    """

    def deleteAll(self):
        # Remove every object a previous (possibly failed) run may have left
        # behind: deepest children first, then their parents.
        delete_force(self.ldb_admin, "CN=test_inherit_group,OU=test_inherit_ou," + self.base_dn)
        delete_force(self.ldb_admin, "OU=test_inherit_ou5,OU=test_inherit_ou1,OU=test_inherit_ou_p," + self.base_dn)
        delete_force(self.ldb_admin, "OU=test_inherit_ou6,OU=test_inherit_ou2,OU=test_inherit_ou_p," + self.base_dn)
        delete_force(self.ldb_admin, "OU=test_inherit_ou1,OU=test_inherit_ou_p," + self.base_dn)
        delete_force(self.ldb_admin, "OU=test_inherit_ou2,OU=test_inherit_ou_p," + self.base_dn)
        delete_force(self.ldb_admin, "OU=test_inherit_ou3,OU=test_inherit_ou_p," + self.base_dn)
        delete_force(self.ldb_admin, "OU=test_inherit_ou4,OU=test_inherit_ou_p," + self.base_dn)
        delete_force(self.ldb_admin, "OU=test_inherit_ou_p," + self.base_dn)
        delete_force(self.ldb_admin, "OU=test_inherit_ou," + self.base_dn)

    def setUp(self):
        # Start every test from a clean tree.
        super(DaclDescriptorTests, self).setUp()
        self.deleteAll()

    def create_clean_ou(self, object_dn):
        """ Base repeating setup for unittests to follow """
        res = self.ldb_admin.search(base=self.base_dn, scope=SCOPE_SUBTREE, \
            expression="distinguishedName=%s" % object_dn)
        # Make sure top testing OU has been deleted before starting the test
        self.assertEqual(len(res), 0)
        self.ldb_admin.create_ou(object_dn)
        desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
        # Make sure there are inheritable ACEs initially
        self.assertTrue("CI" in desc_sddl or "OI" in desc_sddl)
        # Find and remove all inherit ACEs
        res = re.findall("\(.*?\)", desc_sddl)
        res = [x for x in res if ("CI" in x) or ("OI" in x)]
        for x in res:
            desc_sddl = desc_sddl.replace(x, "")
        # Add flag 'protected' in both DACL and SACL so no inherit ACEs
        # can propagate from above
        # remove SACL, we are not interested
        desc_sddl = desc_sddl.replace(":AI", ":AIP")
        self.sd_utils.modify_sd_on_dn(object_dn, desc_sddl)
        # Verify all inheritable ACEs are gone
        desc_sddl = self.sd_utils.get_sd_as_sddl(object_dn)
        self.assertFalse("CI" in desc_sddl)
        self.assertFalse("OI" in desc_sddl)

    def test_200(self):
        """ OU with protected flag and child group. See if the group has inherit ACEs.
        """
        ou_dn = "OU=test_inherit_ou," + self.base_dn
        group_dn = "CN=test_inherit_group," + ou_dn
        # Create inheritable-free OU
        self.create_clean_ou(ou_dn)
        # Create group child object
        self.ldb_admin.newgroup("test_inherit_group", groupou="OU=test_inherit_ou", grouptype=4)
        # Make sure created group object contains NO inherit ACEs
        desc_sddl = self.sd_utils.get_sd_as_sddl(group_dn)
        self.assertFalse("ID" in desc_sddl)

    def test_201(self):
        """ OU with protected flag and no inherit ACEs, child group with custom descriptor.
        Verify group has custom and default ACEs only.
        """
        ou_dn = "OU=test_inherit_ou," + self.base_dn
        group_dn = "CN=test_inherit_group," + ou_dn
        # Create inheritable-free OU
        self.create_clean_ou(ou_dn)
        # Create group child object using custom security descriptor
        sddl = "O:AUG:AUD:AI(D;;WP;;;DU)"
        tmp_desc = security.descriptor.from_sddl(sddl, self.domain_sid)
        self.ldb_admin.newgroup("test_inherit_group", groupou="OU=test_inherit_ou", grouptype=4, sd=tmp_desc)
        # Make sure created group descriptor has NO additional ACEs
        desc_sddl = self.sd_utils.get_sd_as_sddl(group_dn)
        self.assertEqual(desc_sddl, sddl)
        # A direct SD modify must be stored verbatim as well
        sddl = "O:AUG:AUD:AI(D;;CC;;;LG)"
        self.sd_utils.modify_sd_on_dn(group_dn, sddl)
        desc_sddl = self.sd_utils.get_sd_as_sddl(group_dn)
        self.assertEqual(desc_sddl, sddl)

    def test_202(self):
        """ OU with protected flag and add couple non-inheritable ACEs, child group.
        See if the group has any of the added ACEs.
        """
        ou_dn = "OU=test_inherit_ou," + self.base_dn
        group_dn = "CN=test_inherit_group," + ou_dn
        # Create inheritable-free OU
        self.create_clean_ou(ou_dn)
        # Add some custom non-inheritable ACEs
        mod = "(D;;WP;;;DU)(A;;RP;;;DU)"
        moded = "(D;;CC;;;LG)"
        self.sd_utils.dacl_add_ace(ou_dn, mod)
        # Verify all inheritable ACEs are gone
        desc_sddl = self.sd_utils.get_sd_as_sddl(ou_dn)
        # Create group child object
        self.ldb_admin.newgroup("test_inherit_group", groupou="OU=test_inherit_ou", grouptype=4)
        # Make sure created group object contains NO inherit ACEs
        # also make sure the added above non-inheritable ACEs are absent too
        desc_sddl = self.sd_utils.get_sd_as_sddl(group_dn)
        self.assertFalse("ID" in desc_sddl)
        for x in re.findall("\(.*?\)", mod):
            self.assertFalse(x in desc_sddl)
        # Re-check after an explicit DACL modify on the child
        self.sd_utils.modify_sd_on_dn(group_dn, "D:" + moded)
        desc_sddl = self.sd_utils.get_sd_as_sddl(group_dn)
        self.assertFalse("ID" in desc_sddl)
        for x in re.findall("\(.*?\)", mod):
            self.assertFalse(x in desc_sddl)

    def test_203(self):
        """ OU with protected flag and add 'CI' ACE, child group.
        See if the group has the added inherited ACE.
        """
        ou_dn = "OU=test_inherit_ou," + self.base_dn
        group_dn = "CN=test_inherit_group," + ou_dn
        # Create inheritable-free OU
        self.create_clean_ou(ou_dn)
        # Add some custom 'CI' ACE
        mod = "(D;CI;WP;;;DU)"
        moded = "(D;;CC;;;LG)"
        self.sd_utils.dacl_add_ace(ou_dn, mod)
        desc_sddl = self.sd_utils.get_sd_as_sddl(ou_dn)
        # Create group child object
        tmp_desc = security.descriptor.from_sddl("O:AUG:AUD:AI(A;;CC;;;AU)", self.domain_sid)
        self.ldb_admin.newgroup("test_inherit_group", groupou="OU=test_inherit_ou", grouptype=4, sd=tmp_desc)
        # Make sure created group object contains only the above inherited ACE
        # that we've added manually
        desc_sddl = self.sd_utils.get_sd_as_sddl(group_dn)
        mod = mod.replace(";CI;", ";CIID;")
        self.assertTrue(mod in desc_sddl)
        # Inherited ACE must survive an explicit DACL modify on the child
        self.sd_utils.modify_sd_on_dn(group_dn, "D:" + moded)
        desc_sddl = self.sd_utils.get_sd_as_sddl(group_dn)
        self.assertTrue(moded in desc_sddl)
        self.assertTrue(mod in desc_sddl)

    def test_204(self):
        """ OU with protected flag and add 'OI' ACE, child group.
        See if the group has the added inherited ACE.
        """
        ou_dn = "OU=test_inherit_ou," + self.base_dn
        group_dn = "CN=test_inherit_group," + ou_dn
        # Create inheritable-free OU
        self.create_clean_ou(ou_dn)
        # Add some custom 'OI' ACE
        mod = "(D;OI;WP;;;DU)"
        moded = "(D;;CC;;;LG)"
        self.sd_utils.dacl_add_ace(ou_dn, mod)
        desc_sddl = self.sd_utils.get_sd_as_sddl(ou_dn)
        # Create group child object
        tmp_desc = security.descriptor.from_sddl("O:AUG:AUD:AI(A;;CC;;;AU)", self.domain_sid)
        self.ldb_admin.newgroup("test_inherit_group", groupou="OU=test_inherit_ou", grouptype=4, sd=tmp_desc)
        # Make sure created group object contains only the above inherited ACE
        # that we've added manually
        desc_sddl = self.sd_utils.get_sd_as_sddl(group_dn)
        mod = mod.replace(";OI;", ";OIIOID;")  # change it how it's gonna look like
        self.assertTrue(mod in desc_sddl)
        # Inherited ACE must survive an explicit DACL modify on the child
        self.sd_utils.modify_sd_on_dn(group_dn, "D:" + moded)
        desc_sddl = self.sd_utils.get_sd_as_sddl(group_dn)
        self.assertTrue(moded in desc_sddl)
        self.assertTrue(mod in desc_sddl)

    def test_205(self):
        """ OU with protected flag and add 'OA' for GUID & 'CI' ACE, child group.
        See if the group has the added inherited ACE.
        """
        ou_dn = "OU=test_inherit_ou," + self.base_dn
        group_dn = "CN=test_inherit_group," + ou_dn
        # Create inheritable-free OU
        self.create_clean_ou(ou_dn)
        # Add some custom 'OA' for 'name' attribute & 'CI' ACE
        mod = "(OA;CI;WP;bf967a0e-0de6-11d0-a285-00aa003049e2;;DU)"
        moded = "(D;;CC;;;LG)"
        self.sd_utils.dacl_add_ace(ou_dn, mod)
        desc_sddl = self.sd_utils.get_sd_as_sddl(ou_dn)
        # Create group child object
        tmp_desc = security.descriptor.from_sddl("O:AUG:AUD:AI(A;;CC;;;AU)", self.domain_sid)
        self.ldb_admin.newgroup("test_inherit_group", groupou="OU=test_inherit_ou", grouptype=4, sd=tmp_desc)
        # Make sure created group object contains only the above inherited ACE
        # that we've added manually
        desc_sddl = self.sd_utils.get_sd_as_sddl(group_dn)
        mod = mod.replace(";CI;", ";CIID;")  # change it how it's gonna look like
        self.assertTrue(mod in desc_sddl)
        # Inherited ACE must survive an explicit DACL modify on the child
        self.sd_utils.modify_sd_on_dn(group_dn, "D:" + moded)
        desc_sddl = self.sd_utils.get_sd_as_sddl(group_dn)
        self.assertTrue(moded in desc_sddl)
        self.assertTrue(mod in desc_sddl)

    def test_206(self):
        """ OU with protected flag and add 'OA' for GUID & 'OI' ACE, child group.
        See if the group has the added inherited ACE.
        """
        ou_dn = "OU=test_inherit_ou," + self.base_dn
        group_dn = "CN=test_inherit_group," + ou_dn
        # Create inheritable-free OU
        self.create_clean_ou(ou_dn)
        # Add some custom 'OA' for 'name' attribute & 'OI' ACE
        mod = "(OA;OI;WP;bf967a0e-0de6-11d0-a285-00aa003049e2;;DU)"
        moded = "(D;;CC;;;LG)"
        self.sd_utils.dacl_add_ace(ou_dn, mod)
        desc_sddl = self.sd_utils.get_sd_as_sddl(ou_dn)
        # Create group child object
        tmp_desc = security.descriptor.from_sddl("O:AUG:AUD:AI(A;;CC;;;AU)", self.domain_sid)
        self.ldb_admin.newgroup("test_inherit_group", groupou="OU=test_inherit_ou", grouptype=4, sd=tmp_desc)
        # Make sure created group object contains only the above inherited ACE
        # that we've added manually
        desc_sddl = self.sd_utils.get_sd_as_sddl(group_dn)
        mod = mod.replace(";OI;", ";OIIOID;")  # change it how it's gonna look like
        self.assertTrue(mod in desc_sddl)
        # Inherited ACE must survive an explicit DACL modify on the child
        self.sd_utils.modify_sd_on_dn(group_dn, "D:" + moded)
        desc_sddl = self.sd_utils.get_sd_as_sddl(group_dn)
        self.assertTrue(moded in desc_sddl)
        self.assertTrue(mod in desc_sddl)

    def test_207(self):
        """ OU with protected flag and add 'OA' for OU specific GUID & 'CI' ACE, child group.
        See if the group has the added inherited ACE.
        """
        ou_dn = "OU=test_inherit_ou," + self.base_dn
        group_dn = "CN=test_inherit_group," + ou_dn
        # Create inheritable-free OU
        self.create_clean_ou(ou_dn)
        # Add some custom 'OA' for 'st' attribute (OU specific) & 'CI' ACE
        mod = "(OA;CI;WP;bf967a39-0de6-11d0-a285-00aa003049e2;;DU)"
        moded = "(D;;CC;;;LG)"
        self.sd_utils.dacl_add_ace(ou_dn, mod)
        desc_sddl = self.sd_utils.get_sd_as_sddl(ou_dn)
        # Create group child object
        tmp_desc = security.descriptor.from_sddl("O:AUG:AUD:AI(A;;CC;;;AU)", self.domain_sid)
        self.ldb_admin.newgroup("test_inherit_group", groupou="OU=test_inherit_ou", grouptype=4, sd=tmp_desc)
        # Make sure created group object contains only the above inherited ACE
        # that we've added manually
        desc_sddl = self.sd_utils.get_sd_as_sddl(group_dn)
        mod = mod.replace(";CI;", ";CIID;")  # change it how it's gonna look like
        self.assertTrue(mod in desc_sddl)
        # Inherited ACE must survive an explicit DACL modify on the child
        self.sd_utils.modify_sd_on_dn(group_dn, "D:" + moded)
        desc_sddl = self.sd_utils.get_sd_as_sddl(group_dn)
        self.assertTrue(moded in desc_sddl)
        self.assertTrue(mod in desc_sddl)

    def test_208(self):
        """ OU with protected flag and add 'OA' for OU specific GUID & 'OI' ACE, child group.
        See if the group has the added inherited ACE.
        """
        ou_dn = "OU=test_inherit_ou," + self.base_dn
        group_dn = "CN=test_inherit_group," + ou_dn
        # Create inheritable-free OU
        self.create_clean_ou(ou_dn)
        # Add some custom 'OA' for 'st' attribute (OU specific) & 'OI' ACE
        mod = "(OA;OI;WP;bf967a39-0de6-11d0-a285-00aa003049e2;;DU)"
        moded = "(D;;CC;;;LG)"
        self.sd_utils.dacl_add_ace(ou_dn, mod)
        desc_sddl = self.sd_utils.get_sd_as_sddl(ou_dn)
        # Create group child object
        tmp_desc = security.descriptor.from_sddl("O:AUG:AUD:AI(A;;CC;;;AU)", self.domain_sid)
        self.ldb_admin.newgroup("test_inherit_group", groupou="OU=test_inherit_ou", grouptype=4, sd=tmp_desc)
        # Make sure created group object contains only the above inherited ACE
        # that we've added manually
        desc_sddl = self.sd_utils.get_sd_as_sddl(group_dn)
        mod = mod.replace(";OI;", ";OIIOID;")  # change it how it's gonna look like
        self.assertTrue(mod in desc_sddl)
        # NOTE(review): unlike the sibling tests, the modify here re-supplies
        # the original OA/OI ACE in the new DACL alongside 'moded' — presumably
        # intentional; confirm against the expected server behavior.
        self.sd_utils.modify_sd_on_dn(group_dn, "D:(OA;OI;WP;bf967a39-0de6-11d0-a285-00aa003049e2;;DU)" + moded)
        desc_sddl = self.sd_utils.get_sd_as_sddl(group_dn)
        self.assertTrue(moded in desc_sddl)
        self.assertTrue(mod in desc_sddl)

    def test_209(self):
        """ OU with protected flag and add 'CI' ACE with 'CO' SID, child group.
        See if the group has the added inherited ACE.
        """
        ou_dn = "OU=test_inherit_ou," + self.base_dn
        group_dn = "CN=test_inherit_group," + ou_dn
        # Create inheritable-free OU
        self.create_clean_ou(ou_dn)
        # Add some custom 'CI' ACE
        mod = "(D;CI;WP;;;CO)"
        moded = "(D;;CC;;;LG)"
        self.sd_utils.dacl_add_ace(ou_dn, mod)
        desc_sddl = self.sd_utils.get_sd_as_sddl(ou_dn)
        # Create group child object
        tmp_desc = security.descriptor.from_sddl("O:AUG:AUD:AI(A;;CC;;;AU)", self.domain_sid)
        self.ldb_admin.newgroup("test_inherit_group", groupou="OU=test_inherit_ou", grouptype=4, sd=tmp_desc)
        # Make sure created group object contains only the above inherited ACE(s)
        # that we've added manually
        desc_sddl = self.sd_utils.get_sd_as_sddl(group_dn)
        # CO is expanded to the creator (AU here), plus an inherit-only copy kept
        self.assertTrue("(D;ID;WP;;;AU)" in desc_sddl)
        self.assertTrue("(D;CIIOID;WP;;;CO)" in desc_sddl)
        self.sd_utils.modify_sd_on_dn(group_dn, "D:" + moded)
        desc_sddl = self.sd_utils.get_sd_as_sddl(group_dn)
        self.assertTrue(moded in desc_sddl)
        # NOTE(review): after the admin's modify the expanded CO ACE shows DA
        # as the creator — presumably because the modify re-applies inheritance;
        # confirm against expected server behavior.
        self.assertTrue("(D;ID;WP;;;DA)" in desc_sddl)
        self.assertTrue("(D;CIIOID;WP;;;CO)" in desc_sddl)

    def test_210(self):
        """ OU with protected flag, provide ACEs with ID flag raised. Should be ignored.
        """
        ou_dn = "OU=test_inherit_ou," + self.base_dn
        group_dn = "CN=test_inherit_group," + ou_dn
        self.create_clean_ou(ou_dn)
        # Add some custom ACE
        mod = "D:(D;CIIO;WP;;;CO)(A;ID;WP;;;AU)"
        tmp_desc = security.descriptor.from_sddl(mod, self.domain_sid)
        self.ldb_admin.newgroup("test_inherit_group", groupou="OU=test_inherit_ou", grouptype=4, sd=tmp_desc)
        # Make sure created group object does not contain the ID ace
        desc_sddl = self.sd_utils.get_sd_as_sddl(group_dn)
        self.assertFalse("(A;ID;WP;;;AU)" in desc_sddl)

    def test_211(self):
        """ Provide ACE with CO SID, should be expanded and replaced
        """
        ou_dn = "OU=test_inherit_ou," + self.base_dn
        group_dn = "CN=test_inherit_group," + ou_dn
        # Create inheritable-free OU
        self.create_clean_ou(ou_dn)
        # Add some custom 'CI' ACE
        mod = "D:(D;CI;WP;;;CO)"
        tmp_desc = security.descriptor.from_sddl(mod, self.domain_sid)
        self.ldb_admin.newgroup("test_inherit_group", groupou="OU=test_inherit_ou", grouptype=4, sd=tmp_desc)
        desc_sddl = self.sd_utils.get_sd_as_sddl(group_dn)
        # CO is replaced by the creator's SID (DA) and kept as inherit-only
        self.assertTrue("(D;;WP;;;DA)" in desc_sddl)
        self.assertTrue("(D;CIIO;WP;;;CO)" in desc_sddl)

    def test_212(self):
        """ Provide ACE with IO flag, should be ignored
        """
        ou_dn = "OU=test_inherit_ou," + self.base_dn
        group_dn = "CN=test_inherit_group," + ou_dn
        # Create inheritable-free OU
        self.create_clean_ou(ou_dn)
        # Add some custom 'CI' ACE
        mod = "D:(D;CIIO;WP;;;CO)"
        tmp_desc = security.descriptor.from_sddl(mod, self.domain_sid)
        self.ldb_admin.newgroup("test_inherit_group", groupou="OU=test_inherit_ou", grouptype=4, sd=tmp_desc)
        # Make sure created group object contains only the above inherited ACE(s)
        # that we've added manually
        desc_sddl = self.sd_utils.get_sd_as_sddl(group_dn)
        # Inherit-only ACE is kept but neither expanded nor duplicated
        self.assertTrue("(D;CIIO;WP;;;CO)" in desc_sddl)
        self.assertFalse("(D;;WP;;;DA)" in desc_sddl)
        self.assertFalse("(D;CIIO;WP;;;CO)(D;CIIO;WP;;;CO)" in desc_sddl)

    def test_213(self):
        """ Provide ACE with IO flag, should be ignored
        """
        ou_dn = "OU=test_inherit_ou," + self.base_dn
        group_dn = "CN=test_inherit_group," + ou_dn
        # Create inheritable-free OU
        self.create_clean_ou(ou_dn)
        # IO with no CI/OI can never apply to anything, so it is dropped
        mod = "D:(D;IO;WP;;;DA)"
        tmp_desc = security.descriptor.from_sddl(mod, self.domain_sid)
        self.ldb_admin.newgroup("test_inherit_group", groupou="OU=test_inherit_ou", grouptype=4, sd=tmp_desc)
        # Make sure created group object contains only the above inherited ACE(s)
        # that we've added manually
        desc_sddl = self.sd_utils.get_sd_as_sddl(group_dn)
        self.assertFalse("(D;IO;WP;;;DA)" in desc_sddl)

    def test_214(self):
        """ Test behavior of ACEs containing generic rights
        """
        ou_dn = "OU=test_inherit_ou_p," + self.base_dn
        ou_dn1 = "OU=test_inherit_ou1," + ou_dn
        ou_dn2 = "OU=test_inherit_ou2," + ou_dn
        ou_dn3 = "OU=test_inherit_ou3," + ou_dn
        ou_dn4 = "OU=test_inherit_ou4," + ou_dn
        ou_dn5 = "OU=test_inherit_ou5," + ou_dn1
        ou_dn6 = "OU=test_inherit_ou6," + ou_dn2
        # Create inheritable-free OU
        mod = "D:P(A;CI;WPRPLCCCDCWDRC;;;DA)"
        tmp_desc = security.descriptor.from_sddl(mod, self.domain_sid)
        self.ldb_admin.create_ou(ou_dn, sd=tmp_desc)
        # GA (generic all) with various inherit-flag combinations on the children
        mod = "D:(A;CI;GA;;;DU)"
        tmp_desc = security.descriptor.from_sddl(mod, self.domain_sid)
        self.ldb_admin.create_ou(ou_dn1, sd=tmp_desc)
        mod = "D:(A;CIIO;GA;;;DU)"
        tmp_desc = security.descriptor.from_sddl(mod, self.domain_sid)
        self.ldb_admin.create_ou(ou_dn2, sd=tmp_desc)
        mod = "D:(A;;GA;;;DU)"
        tmp_desc = security.descriptor.from_sddl(mod, self.domain_sid)
        self.ldb_admin.create_ou(ou_dn3, sd=tmp_desc)
        mod = "D:(A;IO;GA;;;DU)"
        tmp_desc = security.descriptor.from_sddl(mod, self.domain_sid)
        self.ldb_admin.create_ou(ou_dn4, sd=tmp_desc)
        self.ldb_admin.create_ou(ou_dn5)
        self.ldb_admin.create_ou(ou_dn6)
        # On the object itself GA is mapped to the specific-rights expansion
        desc_sddl = self.sd_utils.get_sd_as_sddl(ou_dn1)
        self.assertTrue("(A;;RPWPCRCCDCLCLORCWOWDSDDTSW;;;DU)" in desc_sddl)
        self.assertTrue("(A;CIIO;GA;;;DU)" in desc_sddl)
        desc_sddl = self.sd_utils.get_sd_as_sddl(ou_dn2)
        self.assertFalse("(A;;RPWPCRCCDCLCLORCWOWDSDDTSW;;;DU)" in desc_sddl)
        self.assertTrue("(A;CIIO;GA;;;DU)" in desc_sddl)
        desc_sddl = self.sd_utils.get_sd_as_sddl(ou_dn3)
        self.assertTrue("(A;;RPWPCRCCDCLCLORCWOWDSDDTSW;;;DU)" in desc_sddl)
        self.assertFalse("(A;CIIO;GA;;;DU)" in desc_sddl)
        desc_sddl = self.sd_utils.get_sd_as_sddl(ou_dn4)
        self.assertFalse("(A;;RPWPCRCCDCLCLORCWOWDSDDTSW;;;DU)" in desc_sddl)
        self.assertFalse("(A;CIIO;GA;;;DU)" in desc_sddl)
        # Grandchildren receive the expanded form flagged as inherited (ID)
        desc_sddl = self.sd_utils.get_sd_as_sddl(ou_dn5)
        self.assertTrue("(A;ID;RPWPCRCCDCLCLORCWOWDSDDTSW;;;DU)" in desc_sddl)
        self.assertTrue("(A;CIIOID;GA;;;DU)" in desc_sddl)
        desc_sddl = self.sd_utils.get_sd_as_sddl(ou_dn6)
        self.assertTrue("(A;ID;RPWPCRCCDCLCLORCWOWDSDDTSW;;;DU)" in desc_sddl)
        self.assertTrue("(A;CIIOID;GA;;;DU)" in desc_sddl)

    def test_215(self):
        """ Make sure IO flag is removed in child objects
        """
        ou_dn = "OU=test_inherit_ou_p," + self.base_dn
        ou_dn1 = "OU=test_inherit_ou1," + ou_dn
        ou_dn5 = "OU=test_inherit_ou5," + ou_dn1
        # Create inheritable-free OU
        mod = "D:P(A;CI;WPRPLCCCDCWDRC;;;DA)"
        tmp_desc = security.descriptor.from_sddl(mod, self.domain_sid)
        self.ldb_admin.create_ou(ou_dn, sd=tmp_desc)
        mod = "D:(A;CIIO;WP;;;DU)"
        tmp_desc = security.descriptor.from_sddl(mod, self.domain_sid)
        self.ldb_admin.create_ou(ou_dn1, sd=tmp_desc)
        self.ldb_admin.create_ou(ou_dn5)
        # The inherited copy applies to the child, so IO must be stripped
        desc_sddl = self.sd_utils.get_sd_as_sddl(ou_dn5)
        self.assertTrue("(A;CIID;WP;;;DU)" in desc_sddl)
        self.assertFalse("(A;CIIOID;WP;;;DU)" in desc_sddl)

    def test_216(self):
        """ Make sure ID ACES provided by user are ignored
        """
        ou_dn = "OU=test_inherit_ou," + self.base_dn
        group_dn = "CN=test_inherit_group," + ou_dn
        mod = "D:P(A;;WPRPLCCCDCWDRC;;;DA)"
        tmp_desc = security.descriptor.from_sddl(mod, self.domain_sid)
        self.ldb_admin.create_ou(ou_dn, sd=tmp_desc)
        # Add some custom ACE
        mod = "D:(D;ID;WP;;;AU)"
        tmp_desc = security.descriptor.from_sddl(mod, self.domain_sid)
        self.ldb_admin.newgroup("test_inherit_group", groupou="OU=test_inherit_ou", grouptype=4, sd=tmp_desc)
        # Make sure created group object does not contain the ID ace
        desc_sddl = self.sd_utils.get_sd_as_sddl(group_dn)
        self.assertFalse("(A;ID;WP;;;AU)" in desc_sddl)
        self.assertFalse("(A;;WP;;;AU)" in desc_sddl)

    def test_217(self):
        """ Make sure ID ACES provided by user are not ignored if P flag is set
        """
        ou_dn = "OU=test_inherit_ou," + self.base_dn
        group_dn = "CN=test_inherit_group," + ou_dn
        mod = "D:P(A;;WPRPLCCCDCWDRC;;;DA)"
        tmp_desc = security.descriptor.from_sddl(mod, self.domain_sid)
        self.ldb_admin.create_ou(ou_dn, sd=tmp_desc)
        # Add some custom ACE
        mod = "D:P(A;ID;WP;;;AU)"
        tmp_desc = security.descriptor.from_sddl(mod, self.domain_sid)
        self.ldb_admin.newgroup("test_inherit_group", groupou="OU=test_inherit_ou", grouptype=4, sd=tmp_desc)
        # Make sure created group object does not contain the ID ace
        # (the ACE survives, but with the bogus ID flag cleared)
        desc_sddl = self.sd_utils.get_sd_as_sddl(group_dn)
        self.assertFalse("(A;ID;WP;;;AU)" in desc_sddl)
        self.assertTrue("(A;;WP;;;AU)" in desc_sddl)
########################################################################################
class SdFlagsDescriptorTests(DescriptorTests):
def deleteAll(self):
delete_force(self.ldb_admin, "OU=test_sdflags_ou," + self.base_dn)
def setUp(self):
super(SdFlagsDescriptorTests, self).setUp()
self.test_descr = "O:AUG:AUD:(D;;CC;;;LG)S:(OU;;WP;;;AU)"
self.deleteAll()
def test_301(self):
""" Modify a descriptor with OWNER_SECURITY_INFORMATION set.
See that only the owner has been changed.
"""
ou_dn = "OU=test_sdflags_ou," + self.base_dn
self.ldb_admin.create_ou(ou_dn)
self.sd_utils.modify_sd_on_dn(ou_dn, self.test_descr, controls=["sd_flags:1:%d" % (SECINFO_OWNER)])
desc_sddl = self.sd_utils.get_sd_as_sddl(ou_dn)
# make sure we have modified the owner
self.assertTrue("O:AU" in desc_sddl)
# make sure nothing else has been modified
self.assertFalse("G:AU" in desc_sddl)
self.assertFalse("D:(D;;CC;;;LG)" in desc_sddl)
self.assertFalse("(OU;;WP;;;AU)" in desc_sddl)
def test_302(self):
""" Modify a descriptor with GROUP_SECURITY_INFORMATION set.
See that only the owner has been changed.
"""
ou_dn = "OU=test_sdflags_ou," + self.base_dn
self.ldb_admin.create_ou(ou_dn)
self.sd_utils.modify_sd_on_dn(ou_dn, self.test_descr, controls=["sd_flags:1:%d" % (SECINFO_GROUP)])
desc_sddl = self.sd_utils.get_sd_as_sddl(ou_dn)
# make sure we have modified the group
self.assertTrue("G:AU" in desc_sddl)
# make sure nothing else has been modified
self.assertFalse("O:AU" in desc_sddl)
self.assertFalse("D:(D;;CC;;;LG)" in desc_sddl)
self.assertFalse("(OU;;WP;;;AU)" in desc_sddl)
def test_303(self):
""" Modify a descriptor with SACL_SECURITY_INFORMATION set.
See that only the owner has been changed.
"""
ou_dn = "OU=test_sdflags_ou," + self.base_dn
self.ldb_admin.create_ou(ou_dn)
self.sd_utils.modify_sd_on_dn(ou_dn, self.test_descr, controls=["sd_flags:1:%d" % (SECINFO_DACL)])
desc_sddl = self.sd_utils.get_sd_as_sddl(ou_dn)
# make sure we have modified the DACL
self.assertTrue("(D;;CC;;;LG)" in desc_sddl)
# make sure nothing else has been modified
self.assertFalse("O:AU" in desc_sddl)
self.assertFalse("G:AU" in desc_sddl)
self.assertFalse("(OU;;WP;;;AU)" in desc_sddl)
def test_304(self):
""" Modify a descriptor with SACL_SECURITY_INFORMATION set.
See that only the owner has been changed.
"""
ou_dn = "OU=test_sdflags_ou," + self.base_dn
self.ldb_admin.create_ou(ou_dn)
self.sd_utils.modify_sd_on_dn(ou_dn, self.test_descr, controls=["sd_flags:1:%d" % (SECINFO_SACL)])
desc_sddl = self.sd_utils.get_sd_as_sddl(ou_dn)
# make sure we have modified the DACL
self.assertTrue("(OU;;WP;;;AU)" in desc_sddl)
# make sure nothing else has been modified
self.assertFalse("O:AU" in desc_sddl)
self.assertFalse("G:AU" in desc_sddl)
self.assertFalse("(D;;CC;;;LG)" in desc_sddl)
def test_305(self):
""" Modify a descriptor with 0x0 set.
Contrary to logic this is interpreted as no control,
which is the same as 0xF
"""
ou_dn = "OU=test_sdflags_ou," + self.base_dn
self.ldb_admin.create_ou(ou_dn)
self.sd_utils.modify_sd_on_dn(ou_dn, self.test_descr, controls=["sd_flags:1:0"])
desc_sddl = self.sd_utils.get_sd_as_sddl(ou_dn)
# make sure we have modified the DACL
self.assertTrue("(OU;;WP;;;AU)" in desc_sddl)
# make sure nothing else has been modified
self.assertTrue("O:AU" in desc_sddl)
self.assertTrue("G:AU" in desc_sddl)
self.assertTrue("(D;;CC;;;LG)" in desc_sddl)
def test_306(self):
""" Modify a descriptor with 0xF set.
"""
ou_dn = "OU=test_sdflags_ou," + self.base_dn
self.ldb_admin.create_ou(ou_dn)
self.sd_utils.modify_sd_on_dn(ou_dn, self.test_descr, controls=["sd_flags:1:15"])
desc_sddl = self.sd_utils.get_sd_as_sddl(ou_dn)
# make sure we have modified the DACL
self.assertTrue("(OU;;WP;;;AU)" in desc_sddl)
# make sure nothing else has been modified
self.assertTrue("O:AU" in desc_sddl)
self.assertTrue("G:AU" in desc_sddl)
self.assertTrue("(D;;CC;;;LG)" in desc_sddl)
def test_307(self):
""" Read a descriptor with OWNER_SECURITY_INFORMATION
Only the owner part should be returned.
"""
ou_dn = "OU=test_sdflags_ou," + self.base_dn
self.ldb_admin.create_ou(ou_dn)
desc_sddl = self.sd_utils.get_sd_as_sddl(ou_dn, controls=["sd_flags:1:%d" % (SECINFO_OWNER)])
# make sure we have read the owner
self.assertTrue("O:" in desc_sddl)
# make sure we have read nothing else
self.assertFalse("G:" in desc_sddl)
self.assertFalse("D:" in desc_sddl)
self.assertFalse("S:" in desc_sddl)
def test_308(self):
""" Read a descriptor with GROUP_SECURITY_INFORMATION
Only the group part should be returned.
"""
ou_dn = "OU=test_sdflags_ou," + self.base_dn
self.ldb_admin.create_ou(ou_dn)
desc_sddl = self.sd_utils.get_sd_as_sddl(ou_dn, controls=["sd_flags:1:%d" % (SECINFO_GROUP)])
# make sure we have read the owner
self.assertTrue("G:" in desc_sddl)
# make sure we have read nothing else
self.assertFalse("O:" in desc_sddl)
self.assertFalse("D:" in desc_sddl)
self.assertFalse("S:" in desc_sddl)
def test_309(self):
""" Read a descriptor with SACL_SECURITY_INFORMATION
Only the sacl part should be returned.
"""
ou_dn = "OU=test_sdflags_ou," + self.base_dn
self.ldb_admin.create_ou(ou_dn)
desc_sddl = self.sd_utils.get_sd_as_sddl(ou_dn, controls=["sd_flags:1:%d" % (SECINFO_SACL)])
# make sure we have read the owner
self.assertTrue("S:" in desc_sddl)
# make sure we have read nothing else
self.assertFalse("O:" in desc_sddl)
self.assertFalse("D:" in desc_sddl)
self.assertFalse("G:" in desc_sddl)
def test_310(self):
""" Read a descriptor with DACL_SECURITY_INFORMATION
Only the dacl part should be returned.
"""
ou_dn = "OU=test_sdflags_ou," + self.base_dn
self.ldb_admin.create_ou(ou_dn)
desc_sddl = self.sd_utils.get_sd_as_sddl(ou_dn, controls=["sd_flags:1:%d" % (SECINFO_DACL)])
# make sure we have read the owner
self.assertTrue("D:" in desc_sddl)
# make sure we have read nothing else
self.assertFalse("O:" in desc_sddl)
self.assertFalse("S:" in desc_sddl)
self.assertFalse("G:" in desc_sddl)
class RightsAttributesTests(DescriptorTests):
def deleteAll(self):
delete_force(self.ldb_admin, self.get_users_domain_dn("testuser_attr"))
delete_force(self.ldb_admin, self.get_users_domain_dn("testuser_attr2"))
delete_force(self.ldb_admin, "OU=test_domain_ou1," + self.base_dn)
def setUp(self):
super(RightsAttributesTests, self).setUp()
self.deleteAll()
### Create users
# User 1
self.ldb_admin.newuser("testuser_attr", "samba123@")
# User 2, Domain Admins
self.ldb_admin.newuser("testuser_attr2", "samba123@")
self.ldb_admin.add_remove_group_members("Domain Admins",
"testuser_attr2",
add_members_operation=True)
def test_sDRightsEffective(self):
object_dn = "OU=test_domain_ou1," + self.base_dn
delete_force(self.ldb_admin, object_dn)
self.ldb_admin.create_ou(object_dn)
print self.get_users_domain_dn("testuser_attr")
user_sid = self.sd_utils.get_object_sid(self.get_users_domain_dn("testuser_attr"))
#give testuser1 read access so attributes can be retrieved
mod = "(A;CI;RP;;;%s)" % str(user_sid)
self.sd_utils.dacl_add_ace(object_dn, mod)
_ldb = self.get_ldb_connection("testuser_attr", "samba123@")
res = _ldb.search(base=object_dn, expression="", scope=SCOPE_BASE,
attrs=["sDRightsEffective"])
#user whould have no rights at all
self.assertEquals(len(res), 1)
self.assertEquals(res[0]["sDRightsEffective"][0], "0")
#give the user Write DACL and see what happens
mod = "(A;CI;WD;;;%s)" % str(user_sid)
self.sd_utils.dacl_add_ace(object_dn, mod)
res = _ldb.search(base=object_dn, expression="", scope=SCOPE_BASE,
attrs=["sDRightsEffective"])
#user whould have DACL_SECURITY_INFORMATION
self.assertEquals(len(res), 1)
self.assertEquals(res[0]["sDRightsEffective"][0], ("%d") % SECINFO_DACL)
#give the user Write Owners and see what happens
mod = "(A;CI;WO;;;%s)" % str(user_sid)
self.sd_utils.dacl_add_ace(object_dn, mod)
res = _ldb.search(base=object_dn, expression="", scope=SCOPE_BASE,
attrs=["sDRightsEffective"])
#user whould have DACL_SECURITY_INFORMATION, OWNER_SECURITY_INFORMATION, GROUP_SECURITY_INFORMATION
self.assertEquals(len(res), 1)
self.assertEquals(res[0]["sDRightsEffective"][0], ("%d") % (SECINFO_DACL | SECINFO_GROUP | SECINFO_OWNER))
#no way to grant security privilege bu adding ACE's so we use a memeber of Domain Admins
_ldb = self.get_ldb_connection("testuser_attr2", "samba123@")
res = _ldb.search(base=object_dn, expression="", scope=SCOPE_BASE,
attrs=["sDRightsEffective"])
#user whould have DACL_SECURITY_INFORMATION, OWNER_SECURITY_INFORMATION, GROUP_SECURITY_INFORMATION
self.assertEquals(len(res), 1)
self.assertEquals(res[0]["sDRightsEffective"][0], \
("%d") % (SECINFO_DACL | SECINFO_GROUP | SECINFO_OWNER | SECINFO_SACL))
def test_allowedChildClassesEffective(self):
object_dn = "OU=test_domain_ou1," + self.base_dn
delete_force(self.ldb_admin, object_dn)
self.ldb_admin.create_ou(object_dn)
user_sid = self.sd_utils.get_object_sid(self.get_users_domain_dn("testuser_attr"))
#give testuser1 read access so attributes can be retrieved
mod = "(A;CI;RP;;;%s)" % str(user_sid)
self.sd_utils.dacl_add_ace(object_dn, mod)
_ldb = self.get_ldb_connection("testuser_attr", "samba123@")
res = _ldb.search(base=object_dn, expression="", scope=SCOPE_BASE,
attrs=["allowedChildClassesEffective"])
#there should be no allowed child classes
self.assertEquals(len(res), 1)
self.assertFalse("allowedChildClassesEffective" in res[0].keys())
#give the user the right to create children of type user
mod = "(OA;CI;CC;bf967aba-0de6-11d0-a285-00aa003049e2;;%s)" % str(user_sid)
self.sd_utils.dacl_add_ace(object_dn, mod)
res = _ldb.search(base=object_dn, expression="", scope=SCOPE_BASE,
attrs=["allowedChildClassesEffective"])
# allowedChildClassesEffective should only have one value, user
self.assertEquals(len(res), 1)
self.assertEquals(len(res[0]["allowedChildClassesEffective"]), 1)
self.assertEquals(res[0]["allowedChildClassesEffective"][0], "user")
def test_allowedAttributesEffective(self):
object_dn = "OU=test_domain_ou1," + self.base_dn
delete_force(self.ldb_admin, object_dn)
self.ldb_admin.create_ou(object_dn)
user_sid = self.sd_utils.get_object_sid(self.get_users_domain_dn("testuser_attr"))
#give testuser1 read access so attributes can be retrieved
mod = "(A;CI;RP;;;%s)" % str(user_sid)
self.sd_utils.dacl_add_ace(object_dn, mod)
_ldb = self.get_ldb_connection("testuser_attr", "samba123@")
res = _ldb.search(base=object_dn, expression="", scope=SCOPE_BASE,
attrs=["allowedAttributesEffective"])
#there should be no allowed attributes
self.assertEquals(len(res), 1)
self.assertFalse("allowedAttributesEffective" in res[0].keys())
#give the user the right to write displayName and managedBy
mod2 = "(OA;CI;WP;bf967953-0de6-11d0-a285-00aa003049e2;;%s)" % str(user_sid)
mod = "(OA;CI;WP;0296c120-40da-11d1-a9c0-0000f80367c1;;%s)" % str(user_sid)
# also rights to modify an read only attribute, fromEntry
mod3 = "(OA;CI;WP;9a7ad949-ca53-11d1-bbd0-0080c76670c0;;%s)" % str(user_sid)
self.sd_utils.dacl_add_ace(object_dn, mod + mod2 + mod3)
res = _ldb.search(base=object_dn, expression="", scope=SCOPE_BASE,
attrs=["allowedAttributesEffective"])
# value should only contain user and managedBy
self.assertEquals(len(res), 1)
self.assertEquals(len(res[0]["allowedAttributesEffective"]), 2)
self.assertTrue("displayName" in res[0]["allowedAttributesEffective"])
self.assertTrue("managedBy" in res[0]["allowedAttributesEffective"])
if not "://" in host:
if os.path.isfile(host):
host = "tdb://%s" % host
else:
host = "ldap://%s" % host
# use 'paged_search' module when connecting remotely
if host.lower().startswith("ldap://"):
ldb_options = ["modules:paged_searches"]
ldb = SamDB(host,
credentials=creds,
session_info=system_session(lp),
lp=lp,
options=ldb_options)
runner = SubunitTestRunner()
rc = 0
if not runner.run(unittest.makeSuite(OwnerGroupDescriptorTests)).wasSuccessful():
rc = 1
if not runner.run(unittest.makeSuite(DaclDescriptorTests)).wasSuccessful():
rc = 1
if not runner.run(unittest.makeSuite(SdFlagsDescriptorTests)).wasSuccessful():
rc = 1
if not runner.run(unittest.makeSuite(RightsAttributesTests)).wasSuccessful():
rc = 1
sys.exit(rc)
| 49.406234
| 125
| 0.638981
| 13,602
| 98,269
| 4.346126
| 0.043523
| 0.039245
| 0.032749
| 0.028182
| 0.880523
| 0.865485
| 0.85065
| 0.838402
| 0.828862
| 0.807193
| 0
| 0.018261
| 0.231538
| 98,269
| 1,988
| 126
| 49.431087
| 0.764566
| 0.115754
| 0
| 0.710435
| 0
| 0
| 0.161021
| 0.052029
| 0
| 0
| 0
| 0
| 0.120249
| 0
| null | null | 0.004147
| 0.014513
| null | null | 0.002764
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b3ed0241311e098a2f9b771c3c08edc868186331
| 37
|
py
|
Python
|
src/lib/runpy.py
|
DTenore/skulpt
|
098d20acfb088d6db85535132c324b7ac2f2d212
|
[
"MIT"
] | 2,671
|
2015-01-03T08:23:25.000Z
|
2022-03-31T06:15:48.000Z
|
src/lib/runpy.py
|
wakeupmuyunhe/skulpt
|
a8fb11a80fb6d7c016bab5dfe3712517a350b347
|
[
"MIT"
] | 972
|
2015-01-05T08:11:00.000Z
|
2022-03-29T13:47:15.000Z
|
src/lib/runpy.py
|
wakeupmuyunhe/skulpt
|
a8fb11a80fb6d7c016bab5dfe3712517a350b347
|
[
"MIT"
] | 845
|
2015-01-03T19:53:36.000Z
|
2022-03-29T18:34:22.000Z
|
import _sk_fail; _sk_fail._("runpy")
| 18.5
| 36
| 0.756757
| 6
| 37
| 3.833333
| 0.666667
| 0.521739
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081081
| 37
| 1
| 37
| 37
| 0.676471
| 0
| 0
| 0
| 0
| 0
| 0.135135
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
375e0870a84a2df4600e7596f1cfbf306bd4eb0c
| 2,130
|
py
|
Python
|
django-pfiProject-docker/meas_web/models.py
|
RENCI-NRIG/kafka-docker
|
f7fdd39770fbd04bfcf505b55f54ece2d08de1f8
|
[
"MIT"
] | 2
|
2020-08-19T15:43:31.000Z
|
2020-08-19T16:03:36.000Z
|
django-pfiProject-docker/meas_web/models.py
|
RENCI-NRIG/kafka-docker
|
f7fdd39770fbd04bfcf505b55f54ece2d08de1f8
|
[
"MIT"
] | 13
|
2019-11-01T18:39:04.000Z
|
2022-02-26T18:04:37.000Z
|
django-pfiProject-docker/meas_web/models.py
|
RENCI-NRIG/kafka-docker
|
f7fdd39770fbd04bfcf505b55f54ece2d08de1f8
|
[
"MIT"
] | null | null | null |
# Create your models here.
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.gis.db import models
# Create your models here.
STATUS_CHOICES = (
('d', 'Draft'),
('p', 'Published')
)
class mscnt(models.Model):
bore_id = models.CharField(max_length=20)
job_id = models.CharField(max_length=20)
instrument = models.CharField(max_length=20)
chemical_id = models.CharField(max_length=20)
measurement_value = models.CharField(max_length=20)
units = models.CharField(max_length=20)
date = models.DateField('date')
time = models.TimeField('time')
status = models.CharField(max_length=1, choices=STATUS_CHOICES, default='d')
comment = models.CharField(max_length=1000, null=True, default="")
geom = models.PointField(null=False)
def toString(self):
#{DeviceX0001 2018-05-17 09:21:20.3 51.517016 -0.144819} {JobY001 CH5OH 10000.0 ppm}
retVal = "{" + self.instrument + " " + str(self.date) + " " + str(self.time) + "} {" + self.bore_id + " " + self.job_id + " " + self.chemical_id + " " + self.measurement_value + " " + self.units + " " + self.geom + "}"
return retVal
class gcmv(models.Model):
bore_id = models.CharField(max_length=20)
job_id = models.CharField(max_length=20)
instrument = models.CharField(max_length=20)
chemical_id = models.CharField(max_length=20)
measurement_value = models.CharField(max_length=20)
units = models.CharField(max_length=20)
date = models.DateField('date')
time = models.TimeField('time')
status = models.CharField(max_length=1, choices=STATUS_CHOICES, default='d')
comment = models.CharField(max_length=1000, null=True, default="")
geom = models.PointField(null=False)
def toString(self):
#{DeviceX0001 2018-05-17 09:21:20.3 51.517016 -0.144819} {JobY001 CH5OH 10000.0 ppm}
retVal = "{" + self.instrument + " " + str(self.date) + " " + str(self.time) + "} {" + self.bore_id + " " + self.job_id + " " + self.chemical_id + " " + self.measurement_value + " " + self.units + " " + self.geom + "}"
return retVal
| 44.375
| 231
| 0.662441
| 277
| 2,130
| 4.949458
| 0.259928
| 0.175055
| 0.210066
| 0.280088
| 0.881109
| 0.881109
| 0.881109
| 0.881109
| 0.881109
| 0.881109
| 0
| 0.070812
| 0.184507
| 2,130
| 47
| 232
| 45.319149
| 0.71848
| 0.111268
| 0
| 0.777778
| 0
| 0
| 0.030753
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055556
| false
| 0
| 0.055556
| 0
| 0.833333
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 10
|
805c481dc5c99e7cc8577a218658e6d71fb8ce38
| 1,299
|
py
|
Python
|
benchmark/test_ghostnet.py
|
Oneflow-Inc/vision
|
352e9240f63118112ea174bb2d0b502fa54be16f
|
[
"BSD-3-Clause"
] | 40
|
2021-10-19T02:34:56.000Z
|
2022-03-25T07:49:44.000Z
|
benchmark/test_ghostnet.py
|
Oneflow-Inc/vision
|
352e9240f63118112ea174bb2d0b502fa54be16f
|
[
"BSD-3-Clause"
] | 53
|
2021-10-22T02:24:44.000Z
|
2022-03-31T04:20:47.000Z
|
benchmark/test_ghostnet.py
|
Oneflow-Inc/vision
|
352e9240f63118112ea174bb2d0b502fa54be16f
|
[
"BSD-3-Clause"
] | 11
|
2022-01-06T02:57:07.000Z
|
2022-03-23T15:19:51.000Z
|
from benchmark import *
import oneflow_benchmark
from flowvision.models.ghostnet import ghostnet
@oneflow_benchmark.ci_settings(compare={"median": "5%"})
def test_ghostnet_batch_size1(benchmark, net=ghostnet, input_shape=[1, 3, 224, 224]):
model, x, optimizer = fetch_args(net, input_shape)
benchmark(run, model, x, optimizer)
@oneflow_benchmark.ci_settings(compare={"median": "5%"})
def test_ghostnet_batch_size2(benchmark, net=ghostnet, input_shape=[2, 3, 224, 224]):
model, x, optimizer = fetch_args(net, input_shape)
benchmark(run, model, x, optimizer)
@oneflow_benchmark.ci_settings(compare={"median": "5%"})
def test_ghostnet_batch_size4(benchmark, net=ghostnet, input_shape=[4, 3, 224, 224]):
model, x, optimizer = fetch_args(net, input_shape)
benchmark(run, model, x, optimizer)
@oneflow_benchmark.ci_settings(compare={"median": "5%"})
def test_ghostnet_batch_size8(benchmark, net=ghostnet, input_shape=[8, 3, 224, 224]):
model, x, optimizer = fetch_args(net, input_shape)
benchmark(run, model, x, optimizer)
@oneflow_benchmark.ci_settings(compare={"median": "5%"})
def test_ghostnet_batch_size16(benchmark, net=ghostnet, input_shape=[16, 3, 224, 224]):
model, x, optimizer = fetch_args(net, input_shape)
benchmark(run, model, x, optimizer)
| 38.205882
| 87
| 0.7398
| 182
| 1,299
| 5.054945
| 0.197802
| 0.108696
| 0.163043
| 0.141304
| 0.875
| 0.711957
| 0.711957
| 0.711957
| 0.711957
| 0.711957
| 0
| 0.045455
| 0.119323
| 1,299
| 33
| 88
| 39.363636
| 0.758741
| 0
| 0
| 0.652174
| 0
| 0
| 0.030793
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.217391
| false
| 0
| 0.130435
| 0
| 0.347826
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
809707b614a28dec9c230af3291d24c0984331d0
| 62
|
py
|
Python
|
roscraco/router/zyxel/__init__.py
|
spantaleev/roscraco
|
87a5a7c54931d5586fd7d30c8c67a699bef69c1f
|
[
"BSD-3-Clause"
] | 13
|
2015-03-01T00:39:43.000Z
|
2020-09-06T09:32:52.000Z
|
roscraco/router/zyxel/__init__.py
|
spantaleev/roscraco
|
87a5a7c54931d5586fd7d30c8c67a699bef69c1f
|
[
"BSD-3-Clause"
] | 3
|
2015-08-08T01:34:35.000Z
|
2017-05-14T11:07:50.000Z
|
roscraco/router/zyxel/__init__.py
|
spantaleev/roscraco
|
87a5a7c54931d5586fd7d30c8c67a699bef69c1f
|
[
"BSD-3-Clause"
] | 11
|
2015-01-29T03:21:08.000Z
|
2020-06-30T17:05:19.000Z
|
from .p320w import Zyxel_P320W
from .p330w import Zyxel_P330W
| 20.666667
| 30
| 0.83871
| 10
| 62
| 5
| 0.5
| 0.44
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.222222
| 0.129032
| 62
| 2
| 31
| 31
| 0.703704
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
80bec6a7a3ebe14562ded9376f94f40c73b681de
| 176
|
py
|
Python
|
cclr_py_scripts/__init__.py
|
blaze-the-star/cclear-compiler
|
618daaa56566baf8b7bda35fe8c0fde4100d12c2
|
[
"MIT"
] | null | null | null |
cclr_py_scripts/__init__.py
|
blaze-the-star/cclear-compiler
|
618daaa56566baf8b7bda35fe8c0fde4100d12c2
|
[
"MIT"
] | null | null | null |
cclr_py_scripts/__init__.py
|
blaze-the-star/cclear-compiler
|
618daaa56566baf8b7bda35fe8c0fde4100d12c2
|
[
"MIT"
] | null | null | null |
from cclr_py_scripts import compiler
from cclr_py_scripts import lexer
from cclr_py_scripts import parser
from cclr_py_scripts import commands
from cclr_py_scripts import entry
| 35.2
| 36
| 0.892045
| 30
| 176
| 4.9
| 0.333333
| 0.272109
| 0.340136
| 0.578231
| 0.782313
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107955
| 176
| 5
| 37
| 35.2
| 0.936306
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
038f03c9651cd91dbda364b3017e4197e07054b0
| 107
|
py
|
Python
|
PIPS/periodogram/custom/models/__init__.py
|
SterlingYM/getPeriod2
|
738834d5f075dc27c2c903b2003a7920418ad772
|
[
"MIT"
] | null | null | null |
PIPS/periodogram/custom/models/__init__.py
|
SterlingYM/getPeriod2
|
738834d5f075dc27c2c903b2003a7920418ad772
|
[
"MIT"
] | null | null | null |
PIPS/periodogram/custom/models/__init__.py
|
SterlingYM/getPeriod2
|
738834d5f075dc27c2c903b2003a7920418ad772
|
[
"MIT"
] | null | null | null |
from PIPS.periodogram.custom.models.Fourier import *
from PIPS.periodogram.custom.models.Gaussian import *
| 35.666667
| 53
| 0.831776
| 14
| 107
| 6.357143
| 0.571429
| 0.179775
| 0.426966
| 0.561798
| 0.696629
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.074766
| 107
| 2
| 54
| 53.5
| 0.89899
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.