| Column | Type |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |

Each record below lists its cells in this column order: the metadata prefix as one row, the multi-line `content` cell as raw code, then the remaining numeric cells as one row.
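The `*_quality_signal` columns carry raw measurements, while the matching unsuffixed `qsc_*` columns hold 0/1 values per record. A minimal sketch of how a dump like this could be filtered with pandas; the file name, the 0.5 cutoff, and reading 1 as "flagged" are illustrative assumptions, not part of this dump:

```python
import pandas as pd

# Hypothetical export of the records shown below; the file name is an assumption.
df = pd.read_parquet("code_quality_sample.parquet")

# Raw signal: fraction of characters covered by duplicated 5-grams.
# The 0.5 cutoff is illustrative, not a threshold taken from this dataset.
low_dupe = df[df["qsc_code_frac_chars_dupe_5grams_quality_signal"] < 0.5]

# The unsuffixed qsc_* columns hold 0/1 per record; treating 1 as "flagged"
# is an assumption based on the column names.
kept = low_dupe[low_dupe["qsc_code_frac_lines_dupe_lines"] == 0]
print(f"kept {len(kept)} of {len(df)} records")
```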
| e0b1d99b11745a9c28656509493706af8a72dee9 | 119 | py | Python | DataStructures-Algorithms/Udacity-Python-DSA/03-Recursion/reverse_string.py | shoaibur/SWE | 1e114a2750f2df5d6c50b48c8e439224894d65da | ["MIT"] | 1 | 2020-11-14T18:28:13.000Z | 2020-11-14T18:28:13.000Z | DataStructures-Algorithms/Udacity-Python-DSA/03-Recursion/reverse_string.py | shoaibur/SWE | 1e114a2750f2df5d6c50b48c8e439224894d65da | ["MIT"] | null | null | null | DataStructures-Algorithms/Udacity-Python-DSA/03-Recursion/reverse_string.py | shoaibur/SWE | 1e114a2750f2df5d6c50b48c8e439224894d65da | ["MIT"] | null | null | null |
def reverse_string(string):
if len(string) <= 1: return string
return string[-1] + reverse_string(string[:-1])
| 29.75 | 51 | 0.680672 | 17 | 119 | 4.647059 | 0.411765 | 0.265823 | 0.481013 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.030303 | 0.168067 | 119 | 3 | 52 | 39.666667 | 0.767677 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 6 |
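A quick usage sketch for the recursive `reverse_string` record above (assuming the function is in scope; not part of the record itself):

```python
# Usage sketch for reverse_string from the record above.
assert reverse_string("hello") == "olleh"
assert reverse_string("") == ""  # base case: strings of length <= 1 are returned as-is
```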
| e0bfa013f8da25932e2b88dc3dde410810bd188e | 3,805 | py | Python | models/map.py | PavelVavruska/python-raycaster | e548d8ef46d51d38eaf9363b63c414a916d9b533 | ["MIT"] | 1 | 2020-03-19T22:59:38.000Z | 2020-03-19T22:59:38.000Z | models/map.py | PavelVavruska/python-raycaster | e548d8ef46d51d38eaf9363b63c414a916d9b533 | ["MIT"] | null | null | null | models/map.py | PavelVavruska/python-raycaster | e548d8ef46d51d38eaf9363b63c414a916d9b533 | ["MIT"] | null | null | null |
# Copyright (c) 2019 Pavel Vavruska
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
class Map:
def __init__(self):
self.__map_base = [
[10, 10, 10, 10, 10, 10, 12, 12, 12, 12, 12, 12, 12, 12, 14, 14, 14, 14, 14, 14],
            [10, -1, 0, -1, -1, 10, 12, -1, -1, -1, -1, -1, -1, 12, 14, -1, -1, -1, -1, 14],
[10, -1, -1, -1, -1, 10, 12, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 14],
[10, -1, -1, -1, -1, 10, 12, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 14],
[10, -1, -1, -1, -1, 10, 12, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 14],
[10, -1, -1, -1, -1, 10, 12, -1, -1, -1, -1, -1, -1, 12, 14, 14, -1, -1, -1, 14],
[10, -1, -1, -1, -1, 10, 12, -1, -1, -1, -1, -1, -1, 12, 14, -1, -1, -1, -1, 14],
[10, -1, -1, -1, -1, 10, 12, -1, -1, -1, -1, -1, -1, 12, 14, -1, -1, -1, -1, 14],
[10, -1, -1, -1, -1, 10, 12, -1, -1, -1, -1, -1, -1, 12, 14, -1, -1, -1, -1, 14],
[10, 10, -1, -1, -1, 10, 12, -1, -1, -1, -1, -1, -1, 12, 14, -1, -1, -1, 14, 14],
[10, -1, -1, -1, -1, 10, 12, -1, -1, -1, -1, -1, -1, 12, 14, -1, -1, -1, -1, 14],
[10, -1, -1, -1, -1, 10, 12, 11, 11, 11, -1, -1, -1, 12, 14, -1, -1, -1, -1, 14],
[10, -1, -1, -1, -1, 10, 12, -1, -1, -1, -1, -1, -1, 12, 14, -1, -1, -1, -1, 14],
[10, -1, -1, -1, -1, 10, 12, -1, -1, -1, -1, -1, -1, 12, 14, 14, -1, -1, -1, 14],
[10, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 12, 14, -1, -1, -1, -1, 14],
[10, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 12, 14, -1, -1, -1, -1, 14],
[10, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 12, 14, -1, -1, -1, -1, 14],
[10, -1, -1, -1, -1, 10, 12, -1, -1, -1, -1, -1, -1, 12, 14, -1, -1, -1, -1, 14],
            [10, -1, -1, -1, -1, 10, 12, -1, -1, -1, -1, -1, -1, 12, 14, -1, -1, 0, -1, 14],
[10, 10, 10, 10, 10, 10, 12, 12, 12, 12, 12, 12, 12, 12, 14, 14, 14, 14, 14, 14]
]
self.__size_y = len(self.__map_base)
self.__size_x = len(self.__map_base[0])
        # Build row-major (size_y rows of size_x cells) to match the [y][x]
        # indexing used by get_effect_at/set_effect_at below.
        self.__map_effects = [[0] * self.__size_x for _ in range(self.__size_y)]
@property
def size_x(self):
return self.__size_x
@property
def size_y(self):
return self.__size_y
@property
def data(self):
return self.__map_base
def get_at(self, x, y):
return self.__map_base[y][x]
def set_at(self, x, y, number):
self.data[y][x] = number
def get_effect_at(self, x, y):
return self.__map_effects[y][x]
def set_effect_at(self, x, y, number):
self.__map_effects[y][x] = number
@property
def effect_data(self):
return self.__map_effects
| 50.065789 | 93 | 0.496452 | 691 | 3,805 | 2.649783 | 0.178003 | 0.223921 | 0.257236 | 0.24249 | 0.397597 | 0.355543 | 0.309667 | 0.286729 | 0.286729 | 0.286729 | 0 | 0.20249 | 0.28226 | 3,805 | 76 | 94 | 50.065789 | 0.46796 | 0.280946 | 0 | 0.382979 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.191489 | false | 0 | 0 | 0.12766 | 0.340426 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 6 |
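A minimal usage sketch for the `Map` class in the record above (assuming the class is in scope); note that the accessors take `(x, y)` while the underlying lists are stored row-major and indexed `[y][x]`:

```python
# Usage sketch for the Map class from the record above.
m = Map()
print(m.size_x, m.size_y)     # 20 20 for the grid defined in __init__
print(m.get_at(0, 0))         # 10: wall tile in the top-left corner
m.set_effect_at(3, 2, 1)      # accessors take (x, y); storage is [y][x]
print(m.get_effect_at(3, 2))  # 1
```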
| e0db353b448a31515653bfce4ce884786d0ceb9e | 231 | py | Python | HLTriggerOffline/Tau/python/Validation/HLTTauValidation_cff.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | ["Apache-2.0"] | 852 | 2015-01-11T21:03:51.000Z | 2022-03-25T21:14:00.000Z | HLTriggerOffline/Tau/python/Validation/HLTTauValidation_cff.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | ["Apache-2.0"] | 30,371 | 2015-01-02T00:14:40.000Z | 2022-03-31T23:26:05.000Z | HLTriggerOffline/Tau/python/Validation/HLTTauValidation_cff.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | ["Apache-2.0"] | 3,240 | 2015-01-02T05:53:18.000Z | 2022-03-31T17:24:21.000Z |
import FWCore.ParameterSet.Config as cms
from HLTriggerOffline.Tau.Validation.HLTTauReferences_cfi import *
from HLTriggerOffline.Tau.Validation.HLTTauValidation_cfi import *
HLTTauVal = cms.Sequence(hltTauRef+hltTauValIdeal)
| 33 | 66 | 0.848485 | 25 | 231 | 7.76 | 0.68 | 0.206186 | 0.237113 | 0.340206 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.08658 | 231 | 6 | 67 | 38.5 | 0.919431 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.75 | 0 | 0.75 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 6 |
| e0e780b61a5d42af1a833e7953916ce3b1676592 | 40 | py | Python | source/__init__.py | elpapi42/simpleid | c6799d9f90f88bd5ed38a9935436d165b0e3518f | ["MIT"] | null | null | null | source/__init__.py | elpapi42/simpleid | c6799d9f90f88bd5ed38a9935436d165b0e3518f | ["MIT"] | null | null | null | source/__init__.py | elpapi42/simpleid | c6799d9f90f88bd5ed38a9935436d165b0e3518f | ["MIT"] | 1 | 2021-07-11T19:19:43.000Z | 2021-07-11T19:19:43.000Z |
from source.simpleidbits import SimpleId
| 40 | 40 | 0.9 | 5 | 40 | 7.2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.075 | 40 | 1 | 40 | 40 | 0.972973 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
| e0ee104973e98e2be5c08f9e2c2fc12870bc03a4 | 25 | py | Python | dlgo/gtp/__init__.py | BachFive/GammaGo_3 | 3eb8e82eef01718684ba8594be49fdac04503e5e | ["MIT"] | null | null | null | dlgo/gtp/__init__.py | BachFive/GammaGo_3 | 3eb8e82eef01718684ba8594be49fdac04503e5e | ["MIT"] | null | null | null | dlgo/gtp/__init__.py | BachFive/GammaGo_3 | 3eb8e82eef01718684ba8594be49fdac04503e5e | ["MIT"] | 1 | 2020-06-11T21:55:31.000Z | 2020-06-11T21:55:31.000Z |
from .frontend import *
| 8.333333 | 23 | 0.72 | 3 | 25 | 6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.2 | 25 | 2 | 24 | 12.5 | 0.9 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
| 46160ad811def77d55bb230d2f0a9f36c17e2159 | 32 | py | Python | moncli/entities/__init__.py | Ashatz/moncli | ba82cc163f589635e4c7dbf80a1c9aea0aa1425a | ["MIT"] | 1 | 2020-08-17T17:05:03.000Z | 2020-08-17T17:05:03.000Z | moncli/entities/__init__.py | Ashatz/moncli | ba82cc163f589635e4c7dbf80a1c9aea0aa1425a | ["MIT"] | null | null | null | moncli/entities/__init__.py | Ashatz/moncli | ba82cc163f589635e4c7dbf80a1c9aea0aa1425a | ["MIT"] | null | null | null |
from .client import MondayClient
| 32 | 32 | 0.875 | 4 | 32 | 7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.09375 | 32 | 1 | 32 | 32 | 0.965517 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
| 1c9f1cfac0e834ee4f0c23449e537c43cb144769 | 377 | py | Python | Django/Video_Project/Day01/HelloDjango/App/views.py | pyforspider/LearningLog | ac5988d7fbb0d07d6e7485f9050250af5bcba089 | ["MIT"] | null | null | null | Django/Video_Project/Day01/HelloDjango/App/views.py | pyforspider/LearningLog | ac5988d7fbb0d07d6e7485f9050250af5bcba089 | ["MIT"] | 18 | 2020-02-12T01:18:12.000Z | 2022-03-12T00:42:15.000Z | Django/Video_Project/Day01/HelloDjango/App/views.py | pyforspider/LearningLog | ac5988d7fbb0d07d6e7485f9050250af5bcba089 | ["MIT"] | null | null | null |
from django.http import HttpResponse
from django.shortcuts import render
# Create your views here.
def hello(request):
return HttpResponse('hello')
def hehe(request):
return HttpResponse('hehe')
def haha(request):
return HttpResponse('<h1>haha</h1>')
def index(request):
return render(request, 'index.html')
def home(request):
return render(request, 'home.html')
| 16.391304 | 37 | 0.742706 | 50 | 377 | 5.6 | 0.42 | 0.232143 | 0.267857 | 0.185714 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006098 | 0.129973 | 377 | 23 | 38 | 16.391304 | 0.847561 | 0.061008 | 0 | 0 | 0 | 0 | 0.116147 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.416667 | false | 0 | 0.166667 | 0.416667 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 6 |
| 1cadd35c322ff6512454d00380a444313857015c | 63 | py | Python | twist/utils/__init__.py | YourLocalDeveloper/Twist | 2734ffc9206071570fecbb89d0b6b25364b00330 | ["MIT"] | 5 | 2020-11-25T19:41:07.000Z | 2021-02-17T18:35:38.000Z | twist/utils/__init__.py | YourLocalDeveloper/Twist | 2734ffc9206071570fecbb89d0b6b25364b00330 | ["MIT"] | null | null | null | twist/utils/__init__.py | YourLocalDeveloper/Twist | 2734ffc9206071570fecbb89d0b6b25364b00330 | ["MIT"] | null | null | null |
from .static import *
from .tests import *
from .wsgi import *
| 15.75 | 21 | 0.714286 | 9 | 63 | 5 | 0.555556 | 0.444444 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.190476 | 63 | 3 | 22 | 21 | 0.882353 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
| 1cc6cd07c25398a41b180c3c1bbf22c9547a5533 | 33,018 | py | Python | api/tests/test_asset_log.py | raheemazeezabiodun/art-backend | 0bc47f3cf6f403101082f201c7fd1ca8108d5731 | ["MIT"] | null | null | null | api/tests/test_asset_log.py | raheemazeezabiodun/art-backend | 0bc47f3cf6f403101082f201c7fd1ca8108d5731 | ["MIT"] | null | null | null | api/tests/test_asset_log.py | raheemazeezabiodun/art-backend | 0bc47f3cf6f403101082f201c7fd1ca8108d5731 | ["MIT"] | null | null | null |
# Standard Library
from datetime import datetime
from unittest.mock import patch
# Third-Party Imports
from django.contrib.auth import get_user_model
from django.core.exceptions import ValidationError
from rest_framework.test import APIClient
# App Imports
from api.tests import APIBaseTestCase
from core.constants import CHECKIN, CHECKOUT
from core.models import Asset, AssetLog, AssetMake, AssetModelNumber
User = get_user_model()
client = APIClient()
class AssetLogModelTest(APIBaseTestCase):
"""Tests for the AssetLog Model and API"""
def setUp(self):
self.test_asset_make = AssetMake.objects.create(
name="Test Asset Make", asset_type=self.test_asset_type
)
self.test_assetmodel1 = AssetModelNumber.objects.create(
name="IMN50987", asset_make=self.test_asset_make
)
self.test_other_asset = Asset(
asset_code="IC00sf",
serial_number="SN00134",
model_number=self.test_assetmodel1,
purchase_date="2018-07-10",
asset_location=self.centre,
)
self.test_other_asset.save()
self.checkin = AssetLog.objects.create(
checked_by=self.security_user, asset=self.asset, log_type=CHECKIN
)
self.checkout = AssetLog.objects.create(
checked_by=self.security_user, asset=self.asset, log_type=CHECKOUT
)
def test_verify_double_checkin_for_asset(self):
# First log
AssetLog.objects.create(
checked_by=self.security_user, asset=self.test_other_asset, log_type=CHECKIN
)
initial_log_count = AssetLog.objects.count()
# Second log
AssetLog.objects.create(
checked_by=self.security_user, asset=self.test_other_asset, log_type=CHECKIN
)
final_log_count = AssetLog.objects.count()
self.assertEqual(initial_log_count, final_log_count)
def test_verify_checkin_for_asset_once_checked_in(self):
# First log
AssetLog.objects.create(
checked_by=self.security_user, asset=self.test_other_asset, log_type=CHECKIN
)
# Second log
AssetLog.objects.create(
checked_by=self.security_user,
asset=self.test_other_asset,
log_type=CHECKOUT,
)
initial_log_count = AssetLog.objects.count()
# Checkin First log again
AssetLog.objects.create(
checked_by=self.security_user, asset=self.test_other_asset, log_type=CHECKIN
)
final_log_count = AssetLog.objects.count()
self.assertEqual(initial_log_count + 1, final_log_count)
def test_add_checkin(self):
AssetLog.objects.create(
checked_by=self.security_user, asset=self.test_other_asset, log_type=CHECKIN
)
self.assertEqual(AssetLog.objects.count(), 3)
created_log = AssetLog.objects.filter(asset=self.test_other_asset).first()
self.assertEqual(created_log.log_type, CHECKIN)
def test_verify_double_checkout_for_asset(self):
# First log
AssetLog.objects.create(
checked_by=self.security_user,
asset=self.test_other_asset,
log_type=CHECKOUT,
)
initial_log_count = AssetLog.objects.count()
# Second log
AssetLog.objects.create(
checked_by=self.security_user,
asset=self.test_other_asset,
log_type=CHECKOUT,
)
final_log_count = AssetLog.objects.count()
self.assertEqual(initial_log_count, final_log_count)
def test_verify_checkout_for_asset_once_checked_out(self):
# First log
AssetLog.objects.create(
checked_by=self.security_user,
asset=self.test_other_asset,
log_type=CHECKOUT,
)
# Second log
AssetLog.objects.create(
checked_by=self.security_user, asset=self.test_other_asset, log_type=CHECKIN
)
initial_log_count = AssetLog.objects.count()
# Checkout First log again
AssetLog.objects.create(
checked_by=self.security_user,
asset=self.test_other_asset,
log_type=CHECKOUT,
)
final_log_count = AssetLog.objects.count()
self.assertEqual(initial_log_count + 1, final_log_count)
def test_add_checkout(self):
count_before_log = AssetLog.objects.count()
AssetLog.objects.create(
checked_by=self.security_user,
asset=self.test_other_asset,
log_type=CHECKOUT,
)
self.assertEqual(AssetLog.objects.count(), count_before_log + 1)
def test_add_checkin_without_log_type(self):
with self.assertRaises(ValidationError) as e:
AssetLog.objects.create(
checked_by=self.security_user, asset=self.test_other_asset
)
self.assertEqual(
e.exception.message_dict,
{
"log_type": ["This field cannot be blank."],
"__all__": ["Log type is required."],
},
)
def test_delete_checkin(self):
self.assertEqual(AssetLog.objects.count(), 2)
self.checkin.delete()
self.assertEqual(AssetLog.objects.count(), 1)
def test_update_checkin(self):
self.checkin.asset = self.test_other_asset
self.checkin.save()
self.assertEqual(
self.checkin.asset.asset_code, self.test_other_asset.asset_code
)
def test_update_checkout(self):
self.checkout.asset = self.test_other_asset
self.checkout.save()
self.assertEqual(
self.checkout.asset.asset_code, self.test_other_asset.asset_code
)
def test_non_authenticated_user_checkin_checkout(self):
response = client.get(self.asset_logs_url)
self.assertEqual(
response.data, {"detail": "Authentication credentials were not provided."}
)
def test_checkout_model_string_representation(self):
self.assertEqual(
str(self.checkin.asset.serial_number), self.asset.serial_number
)
@patch("api.authentication.auth.verify_id_token")
def test_authenticated_normal_user_list_checkin_checkout(
self, mock_verify_id_token
):
mock_verify_id_token.return_value = {"email": self.user.email}
response = client.get(
self.asset_logs_url, HTTP_AUTHORIZATION="Token {}".format(self.token_user)
)
self.assertEqual(
response.data,
{"detail": "You do not have permission to perform this action."},
)
self.assertEqual(response.status_code, 403)
@patch("api.authentication.auth.verify_id_token")
def test_authenticated_security_user_list_checkin_checkout(
self, mock_verify_id_token
):
mock_verify_id_token.return_value = {"email": self.security_user.email}
response = client.get(
self.asset_logs_url,
HTTP_AUTHORIZATION="Token {}".format(self.token_checked_by),
)
self.assertIn(self.checkout.id, response.data["results"][0].values())
self.assertEqual(len(response.data["results"]), AssetLog.objects.count())
self.assertEqual(response.status_code, 200)
@patch("api.authentication.auth.verify_id_token")
def test_authenticated_admin_user_list_checkin_checkout(self, mock_verify_id_token):
mock_verify_id_token.return_value = {"email": self.admin_user.email}
response = client.get(
self.asset_logs_url, HTTP_AUTHORIZATION="Token {}".format(self.token_admin)
)
self.assertIn(self.checkout.id, response.data["results"][0].values())
self.assertEqual(len(response.data["results"]), AssetLog.objects.count())
self.assertEqual(response.status_code, 200)
# test asset log filters
@patch("api.authentication.auth.verify_id_token")
def test_authenticated_admin_user_get_filtered_list_of_asset_logs(
self, mock_verify_id_token
):
mock_verify_id_token.return_value = {"email": self.admin_user.email}
AssetLog.objects.create(
checked_by=self.security_user, asset=self.test_other_asset, log_type=CHECKIN
)
asset_logs_url = (
f"{self.asset_logs_url}/?asset_type={self.test_asset_type.name}"
)
response = client.get(
asset_logs_url, HTTP_AUTHORIZATION=f"Token {self.token_admin}"
)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.data["results"]), 1)
self.assertEqual(
response.data["results"][0]["asset"],
f"{self.test_other_asset.serial_number} - {self.test_other_asset.asset_code}",
)
@patch("api.authentication.auth.verify_id_token")
def test_authenticated_admin_user_get_asset_logs_filtered_by_serial_number(
self, mock_verify_id_token
):
mock_verify_id_token.return_value = {"email": self.admin_user.email}
AssetLog.objects.create(
checked_by=self.security_user, asset=self.test_other_asset, log_type=CHECKIN
)
asset_logs_url = (
f"{self.asset_logs_url}/?asset_serial={self.test_other_asset.serial_number}"
)
response = client.get(
asset_logs_url, HTTP_AUTHORIZATION=f"Token {self.token_admin}"
)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.data["results"]), 1)
self.assertEqual(
response.data["results"][0]["asset"],
f"{self.test_other_asset.serial_number} - {self.test_other_asset.asset_code}",
)
@patch("api.authentication.auth.verify_id_token")
def test_admin_user_get_asset_logs_filtered_by_invalid_serial_number(
self, mock_verify_id_token
):
mock_verify_id_token.return_value = {"email": self.admin_user.email}
AssetLog.objects.create(
checked_by=self.security_user, asset=self.test_other_asset, log_type=CHECKIN
)
asset_logs_url = f"{self.asset_logs_url}/?asset_serial=SERIALDONTEXIST"
response = client.get(
asset_logs_url, HTTP_AUTHORIZATION=f"Token {self.token_admin}"
)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.data["results"]), 0)
@patch("api.authentication.auth.verify_id_token")
def test_admin_user_get_asset_logs_filtered_by_asset_code(
self, mock_verify_id_token
):
mock_verify_id_token.return_value = {"email": self.admin_user.email}
AssetLog.objects.create(
checked_by=self.security_user, asset=self.test_other_asset, log_type=CHECKIN
)
asset_logs_url = (
f"{self.asset_logs_url}/?asset_code={self.test_other_asset.asset_code}"
)
response = client.get(
asset_logs_url, HTTP_AUTHORIZATION=f"Token {self.token_admin}"
)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.data["results"]), 1)
self.assertEqual(
response.data["results"][0]["asset"],
f"{self.test_other_asset.serial_number} - {self.test_other_asset.asset_code}",
)
@patch("api.authentication.auth.verify_id_token")
def test_admin_user_get_asset_logs_filtered_by_invalid_asset_code(
self, mock_verify_id_token
):
mock_verify_id_token.return_value = {"email": self.admin_user.email}
AssetLog.objects.create(
checked_by=self.security_user, asset=self.test_other_asset, log_type=CHECKIN
)
asset_logs_url = f"{self.asset_logs_url}/?asset_serial=CODEDONTEXIST"
response = client.get(
asset_logs_url, HTTP_AUTHORIZATION=f"Token {self.token_admin}"
)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.data["results"]), 0)
@patch("api.authentication.auth.verify_id_token")
def test_authenticated_admin_user_gets_filtered_list_of_asset_logs_by_checked_by(
self, mock_verify_id_token
):
mock_verify_id_token.return_value = {"email": self.admin_user.email}
asset = AssetLog.objects.create(
checked_by=self.security_user, asset=self.test_other_asset, log_type=CHECKIN
)
asset_logs_url = f"{self.asset_logs_url}?checked_by={asset.checked_by}"
response = client.get(
asset_logs_url, HTTP_AUTHORIZATION=f"Token {self.token_admin}"
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
response.data["results"][0]["checked_by"], f"{self.security_user}"
)
@patch("api.authentication.auth.verify_id_token")
def test_authenticated_admin_user_get_of_asset_logs_invalid_filter(
self, mock_verify_id_token
):
mock_verify_id_token.return_value = {"email": self.admin_user.email}
self.checkin = AssetLog.objects.create(
checked_by=self.security_user, asset=self.test_other_asset, log_type=CHECKIN
)
asset_logs_url = f"{self.asset_logs_url}/?asset_type=filterdontexit"
response = client.get(
asset_logs_url, HTTP_AUTHORIZATION=f"Token {self.token_admin}"
)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.data["results"]), 0)
@patch(
"django.utils.timezone.now",
return_value=datetime(2017, 9, 22, 17, 1, 26, 842_150),
)
@patch("api.authentication.auth.verify_id_token")
def test_filter_asset_logs_by_year(self, mock_verify_id_token, mock_datetime):
mock_verify_id_token.return_value = {"email": self.admin_user.email}
asset_log = AssetLog.objects.create(
checked_by=self.security_user, asset=self.test_other_asset, log_type=CHECKIN
)
asset_logs_url = f"{self.asset_logs_url}?year={mock_datetime.return_value.year}"
response = client.get(
asset_logs_url, HTTP_AUTHORIZATION=f"Token {self.token_admin}"
)
data = response.data["results"]
self.assertEqual(response.status_code, 200)
self.assertEqual(
data[0]["created_at"].split("-")[0], f"{asset_log.created_at.year}"
)
logs_count = AssetLog.objects.filter(
created_at__year=mock_datetime.return_value.year
).count()
self.assertEqual(len(response.data["results"]), logs_count)
@patch("api.authentication.auth.verify_id_token")
def test_filter_asset_logs_by_invalid_year(self, mock_verify_id_token):
mock_verify_id_token.return_value = {"email": self.admin_user.email}
AssetLog.objects.create(
checked_by=self.security_user, asset=self.test_other_asset, log_type=CHECKIN
)
asset_logs_url = f"{self.asset_logs_url}?year=5"
response = client.get(
asset_logs_url, HTTP_AUTHORIZATION=f"Token {self.token_admin}"
)
self.assertEqual(response.status_code, 200)
logs_count = AssetLog.objects.filter(created_at__year="5").count()
self.assertEqual(len(response.data["results"]), logs_count)
@patch(
"django.utils.timezone.now",
return_value=datetime(2017, 9, 22, 17, 1, 26, 842_150),
)
@patch("api.authentication.auth.verify_id_token")
def test_filter_asset_logs_by_month(self, mock_verify_id_token, mock_datetime):
mock_verify_id_token.return_value = {"email": self.admin_user.email}
AssetLog.objects.create(
checked_by=self.security_user, asset=self.test_other_asset, log_type=CHECKIN
)
asset_logs_url = (
f"{self.asset_logs_url}?month={mock_datetime.return_value.month}"
)
response = client.get(
asset_logs_url, HTTP_AUTHORIZATION=f"Token {self.token_admin}"
)
data = response.data["results"]
date = datetime.strptime(data[0]["created_at"], "%Y-%m-%dT%H:%M:%S.%fZ")
self.assertEqual(response.status_code, 200)
self.assertEqual(date.month, mock_datetime.return_value.month)
logs_count = AssetLog.objects.filter(
created_at__month=mock_datetime.return_value.month
).count()
self.assertEqual(len(response.data["results"]), logs_count)
@patch("api.authentication.auth.verify_id_token")
def test_filter_asset_logs_by_invalid_month(self, mock_verify_id_token):
mock_verify_id_token.return_value = {"email": self.admin_user.email}
AssetLog.objects.create(
checked_by=self.security_user, asset=self.test_other_asset, log_type=CHECKIN
)
asset_logs_url = f"{self.asset_logs_url}?month=34"
response = client.get(
asset_logs_url, HTTP_AUTHORIZATION=f"Token {self.token_admin}"
)
self.assertEqual(response.status_code, 200)
logs_count = AssetLog.objects.filter(created_at__month="34").count()
self.assertEqual(len(response.data["results"]), logs_count)
@patch(
"django.utils.timezone.now",
return_value=datetime(2017, 9, 22, 17, 1, 26, 842_150),
)
@patch("api.authentication.auth.verify_id_token")
def test_filter_asset_logs_by_day(self, mock_verify_id_token, mock_datetime):
mock_verify_id_token.return_value = {"email": self.admin_user.email}
AssetLog.objects.create(
checked_by=self.security_user, asset=self.test_other_asset, log_type=CHECKIN
)
asset_logs_url = f"{self.asset_logs_url}?day={mock_datetime.return_value.day}"
response = client.get(
asset_logs_url, HTTP_AUTHORIZATION=f"Token {self.token_admin}"
)
data = response.data["results"]
date = datetime.strptime(data[0]["created_at"], "%Y-%m-%dT%H:%M:%S.%fZ")
self.assertEqual(response.status_code, 200)
self.assertEqual(date.day, mock_datetime.return_value.day)
logs_count = AssetLog.objects.filter(
created_at__day=mock_datetime.return_value.day
).count()
self.assertEqual(len(response.data["results"]), logs_count)
@patch("api.authentication.auth.verify_id_token")
def test_filter_asset_logs_by_invalid_day(self, mock_verify_id_token):
mock_verify_id_token.return_value = {"email": self.admin_user.email}
AssetLog.objects.create(
checked_by=self.security_user, asset=self.test_other_asset, log_type=CHECKIN
)
asset_logs_url = f"{self.asset_logs_url}?day=456"
response = client.get(
asset_logs_url, HTTP_AUTHORIZATION=f"Token {self.token_admin}"
)
self.assertEqual(response.status_code, 200)
logs_count = AssetLog.objects.filter(created_at__day="456").count()
self.assertEqual(len(response.data["results"]), logs_count)
@patch(
"django.utils.timezone.now",
return_value=datetime(2017, 9, 22, 17, 1, 26, 842_150),
)
@patch("api.authentication.auth.verify_id_token")
def test_filter_asset_logs_with_collective_attributes(
self, mock_verify_id_token, mock_datetime
):
"""
Test filter asset logs with 3 date attributes
i.e year=2019&month=5&day=15
"""
mock_verify_id_token.return_value = {"email": self.admin_user.email}
AssetLog.objects.create(
checked_by=self.security_user, asset=self.test_other_asset, log_type=CHECKIN
)
year = mock_datetime.return_value.year
month = mock_datetime.return_value.month
day = mock_datetime.return_value.day
asset_logs_url = f"{self.asset_logs_url}?year={year}&month={month}&day={day}"
response = client.get(
asset_logs_url, HTTP_AUTHORIZATION=f"Token {self.token_admin}"
)
data = response.data["results"]
date = datetime.strptime(data[0]["created_at"], "%Y-%m-%dT%H:%M:%S.%fZ")
self.assertEqual(response.status_code, 200)
self.assertEqual(date, mock_datetime.return_value)
logs_count = AssetLog.objects.filter(
created_at=mock_datetime.return_value
).count()
self.assertEqual(len(response.data["results"]), logs_count)
@patch("api.authentication.auth.verify_id_token")
def test_filter_asset_logs_with_collective_non_matching_attributes(
self, mock_verify_id_token
):
"""
Test filter asset logs with 3 date attributes
i.e year=2019&month=5&day=15
"""
mock_verify_id_token.return_value = {"email": self.admin_user.email}
AssetLog.objects.create(
checked_by=self.security_user, asset=self.test_other_asset, log_type=CHECKIN
)
asset_logs_url = f"{self.asset_logs_url}?year=2077&month=09&day=22"
response = client.get(
asset_logs_url, HTTP_AUTHORIZATION=f"Token {self.token_admin}"
)
self.assertEqual(response.status_code, 200)
logs_count = AssetLog.objects.filter(created_at="2077-09-22").count()
self.assertEqual(len(response.data["results"]), logs_count)
@patch("api.authentication.auth.verify_id_token")
def test_authenticated_normal_user_create_checkin(self, mock_verify_id_token):
mock_verify_id_token.return_value = {"email": self.user.email}
response = client.get(
self.asset_logs_url, HTTP_AUTHORIZATION="Token {}".format(self.token_user)
)
self.assertEqual(
response.data,
{"detail": "You do not have permission to perform this action."},
)
self.assertEqual(response.status_code, 403)
@patch("api.authentication.auth.verify_id_token")
def test_authenticated_security_user_create_checkin(self, mock_verify_id_token):
mock_verify_id_token.return_value = {"email": self.security_user.email}
data = {"asset": self.test_other_asset.id, "log_type": "Checkin"}
response = client.post(
self.asset_logs_url,
data,
HTTP_AUTHORIZATION="Token {}".format(self.token_checked_by),
)
self.assertEqual(
response.data["asset"],
f"{self.test_other_asset.serial_number} - "
f"{self.test_other_asset.asset_code}",
)
self.assertEqual(response.status_code, 201)
@patch("api.authentication.auth.verify_id_token")
def test_authenticated_security_user_cannot_double_checkin_an_asset(
self, mock_verify_id_token
):
mock_verify_id_token.return_value = {"email": self.security_user.email}
AssetLog.objects.create(
checked_by=self.security_user, asset=self.test_other_asset, log_type=CHECKIN
)
initial_log_count = AssetLog.objects.count()
data = {"asset": self.test_other_asset.id, "log_type": CHECKIN}
response = client.post(
self.asset_logs_url,
data,
HTTP_AUTHORIZATION="Token {}".format(self.token_checked_by),
)
updated_log_count = AssetLog.objects.count()
self.assertEqual(response.status_code, 400)
self.assertEqual(initial_log_count, updated_log_count)
@patch("api.authentication.auth.verify_id_token")
def test_that_authenticated_security_user_can_checkin_asset_previously_checked_in(
self, mock_verify_id_token
):
mock_verify_id_token.return_value = {"email": self.security_user.email}
# Check in the asset
AssetLog.objects.create(
checked_by=self.security_user, asset=self.test_other_asset, log_type=CHECKIN
)
# Checkout the asset
AssetLog.objects.create(
checked_by=self.security_user,
asset=self.test_other_asset,
log_type=CHECKOUT,
)
initial_log_count = AssetLog.objects.count()
data = {"asset": self.test_other_asset.id, "log_type": CHECKIN}
response = client.post(
self.asset_logs_url,
data,
HTTP_AUTHORIZATION="Token {}".format(self.token_checked_by),
)
updated_log_count = AssetLog.objects.count()
self.assertEqual(response.status_code, 201)
self.assertEqual(initial_log_count + 1, updated_log_count)
@patch("api.authentication.auth.verify_id_token")
def test_authenticated_security_user_can_checkout_asset_previously_checked_out(
self, mock_verify_id_token
):
mock_verify_id_token.return_value = {"email": self.security_user.email}
# Check out the asset
AssetLog.objects.create(
checked_by=self.security_user,
asset=self.test_other_asset,
log_type=CHECKOUT,
)
# Check in the asset
AssetLog.objects.create(
checked_by=self.security_user, asset=self.test_other_asset, log_type=CHECKIN
)
initial_log_count = AssetLog.objects.count()
data = {"asset": self.test_other_asset.id, "log_type": CHECKOUT}
response = client.post(
self.asset_logs_url,
data,
HTTP_AUTHORIZATION="Token {}".format(self.token_checked_by),
)
updated_log_count = AssetLog.objects.count()
self.assertEqual(response.status_code, 201)
self.assertEqual(initial_log_count + 1, updated_log_count)
@patch("api.authentication.auth.verify_id_token")
def test_authenticated_security_user_create_checkout(self, mock_verify_id_token):
mock_verify_id_token.return_value = {"email": self.security_user.email}
data = {"asset": self.test_other_asset.id, "log_type": CHECKOUT}
response = client.post(
self.asset_logs_url,
data,
HTTP_AUTHORIZATION="Token {}".format(self.token_checked_by),
)
self.assertEqual(
response.data["asset"],
f"{self.test_other_asset.serial_number} - "
f"{self.test_other_asset.asset_code}",
)
self.assertEqual(response.status_code, 201)
@patch("api.authentication.auth.verify_id_token")
def test_authenticated_security_user_cannot_double_checkout_an_asset(
self, mock_verify_id_token
):
mock_verify_id_token.return_value = {"email": self.security_user.email}
AssetLog.objects.create(
checked_by=self.security_user,
asset=self.test_other_asset,
log_type=CHECKOUT,
)
initial_log_count = AssetLog.objects.count()
data = {"asset": self.test_other_asset.id, "log_type": CHECKOUT}
response = client.post(
self.asset_logs_url,
data,
HTTP_AUTHORIZATION="Token {}".format(self.token_checked_by),
)
updated_log_count = AssetLog.objects.count()
self.assertEqual(response.status_code, 400)
self.assertEqual(initial_log_count, updated_log_count)
@patch("api.authentication.auth.verify_id_token")
def test_authenticated_security_user_create_with_invalid_log_type(
self, mock_verify_id_token
):
mock_verify_id_token.return_value = {"email": self.security_user.email}
log_type = "Invalid"
data = {"asset": self.test_other_asset.id, "log_type": log_type}
response = client.post(
self.asset_logs_url,
data,
HTTP_AUTHORIZATION="Token {}".format(self.token_checked_by),
)
self.assertEqual(
response.data,
{"log_type": ['"{}" is not a valid choice.'.format(log_type)]},
)
self.assertEqual(response.status_code, 400)
@patch("api.authentication.auth.verify_id_token")
def test_authenticated_security_user_create_checkin_without_asset(
self, mock_verify_id_token
):
mock_verify_id_token.return_value = {"email": self.security_user.email}
data = {"log_type": "Checkin"}
response = client.post(
self.asset_logs_url,
data,
HTTP_AUTHORIZATION="Token {}".format(self.token_checked_by),
)
self.assertDictEqual(response.data, {"asset": ["This field is required."]})
self.assertEqual(response.status_code, 400)
@patch("api.authentication.auth.verify_id_token")
def test_authenticated_security_user_view_checkin_detail(
self, mock_verify_id_token
):
mock_verify_id_token.return_value = {"email": self.security_user.email}
response = client.get(
"{}/{}/".format(self.asset_logs_url, self.checkin.id),
HTTP_AUTHORIZATION="Token {}".format(self.token_checked_by),
)
self.assertEqual(response.data["id"], self.checkin.id)
self.assertEqual(response.status_code, 200)
@patch("api.authentication.auth.verify_id_token")
def test_authenticated_security_user_cannot_delete_checkin(
self, mock_verify_id_token
):
mock_verify_id_token.return_value = {"email": self.security_user.email}
response = client.delete(
"{}/{}/".format(self.asset_logs_url, self.checkin.id),
HTTP_AUTHORIZATION="Token {}".format(self.token_checked_by),
)
self.assertEqual(response.data, {"detail": 'Method "DELETE" not allowed.'})
self.assertEqual(response.status_code, 405)
@patch("api.authentication.auth.verify_id_token")
def test_authenticated_security_user_cannot_put_checkin(self, mock_verify_id_token):
mock_verify_id_token.return_value = {"email": self.security_user.email}
response = client.put(
"{}/{}/".format(self.asset_logs_url, self.checkin.id),
HTTP_AUTHORIZATION="Token {}".format(self.token_checked_by),
)
self.assertEqual(response.data, {"detail": 'Method "PUT" not allowed.'})
self.assertEqual(response.status_code, 405)
@patch("api.authentication.auth.verify_id_token")
def test_authenticated_security_user_cannot_patch_checkin(
self, mock_verify_id_token
):
mock_verify_id_token.return_value = {"email": self.security_user.email}
response = client.patch(
"{}/{}/".format(self.asset_logs_url, self.checkin.id),
HTTP_AUTHORIZATION="Token {}".format(self.token_checked_by),
)
self.assertEqual(response.data, {"detail": 'Method "PATCH" not allowed.'})
self.assertEqual(response.status_code, 405)
@patch("api.authentication.auth.verify_id_token")
def test_authenticated_admin_user_gets_filtered_list_of_asset_logs_by_asset_category(
self, mock_verify_id_token
):
mock_verify_id_token.return_value = {"email": self.admin_user.email}
asset_logs_url = (
f"{self.asset_logs_url}?asset_category={self.asset_category.name}"
)
response = client.get(
asset_logs_url, HTTP_AUTHORIZATION=f"Token {self.token_admin}"
)
self.assertEqual(response.status_code, 200)
asset_log = response.data["results"][0]["asset"]
asset_code = asset_log.split("-")[1].strip()
asset = Asset.objects.get(asset_code=asset_code)
category_name = (
asset.model_number.asset_make.asset_type.asset_sub_category.asset_category.name
)
self.assertEqual(category_name, self.asset_category.name)
@patch("api.authentication.auth.verify_id_token")
def test_authenticated_admin_user_gets_filtered_list_of_asset_logs_by_sub_asset_category(
self, mock_verify_id_token
):
mock_verify_id_token.return_value = {"email": self.admin_user.email}
asset_logs_url = (
f"{self.asset_logs_url}?asset_sub_category={self.asset_sub_category.name}"
)
response = client.get(
asset_logs_url, HTTP_AUTHORIZATION=f"Token {self.token_admin}"
)
self.assertEqual(response.status_code, 200)
asset_log = response.data["results"][0]["asset"]
asset_code = asset_log.split("-")[1].strip()
asset = Asset.objects.get(asset_code=asset_code)
sub_category_name = (
asset.model_number.asset_make.asset_type.asset_sub_category.name
)
self.assertEqual(sub_category_name, self.asset_sub_category.name)
@patch("api.authentication.auth.verify_id_token")
def test_authenticated_admin_user_gets_filtered_list_of_asset_logs_by_asset_make(
self, mock_verify_id_token
):
mock_verify_id_token.return_value = {"email": self.admin_user.email}
asset_logs_url = f"{self.asset_logs_url}?asset_make={self.asset_make.name}"
response = client.get(
asset_logs_url, HTTP_AUTHORIZATION=f"Token {self.token_admin}"
)
self.assertEqual(response.status_code, 200)
asset_log = response.data["results"][0]["asset"]
asset_code = asset_log.split("-")[1].strip()
asset = Asset.objects.get(asset_code=asset_code)
asset_make = asset.model_number.asset_make.name
self.assertEqual(asset_make, self.asset_make.name)
| 41.954257 | 93 | 0.66812 | 4,062 | 33,018 | 5.086657 | 0.051945 | 0.039493 | 0.064176 | 0.055948 | 0.887281 | 0.872665 | 0.846094 | 0.838931 | 0.828139 | 0.824364 | 0 | 0.010609 | 0.226331 | 33,018 | 786 | 94 | 42.007634 | 0.798238 | 0.014204 | 0 | 0.631503 | 0 | 0 | 0.138276 | 0.087235 | 0 | 0 | 0 | 0 | 0.132948 | 1 | 0.067919 | false | 0 | 0.011561 | 0 | 0.080925 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
| 1c1b75847fb541a30414ac68c9f93c4c7ab7a43a | 54 | py | Python | kiwi-engine-selector/kiwi/heuristics/user.py | bubblegumsoldier/kiwi | 91701c1806dcfbc1b038fecf7c2cab8bb07a01d4 | ["MIT"] | null | null | null | kiwi-engine-selector/kiwi/heuristics/user.py | bubblegumsoldier/kiwi | 91701c1806dcfbc1b038fecf7c2cab8bb07a01d4 | ["MIT"] | null | null | null | kiwi-engine-selector/kiwi/heuristics/user.py | bubblegumsoldier/kiwi | 91701c1806dcfbc1b038fecf7c2cab8bb07a01d4 | ["MIT"] | null | null | null |
def get_heuristic(**kwargs):
return kwargs["user"]
| 27 | 28 | 0.703704 | 7 | 54 | 5.285714 | 0.857143 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.12963 | 54 | 2 | 29 | 27 | 0.787234 | 0 | 0 | 0 | 0 | 0 | 0.072727 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | true | 0 | 0 | 0.5 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 6 |
| 1c2f7a621aca507291ac0ddb3de1ae98acabf9a0 | 4,878 | py | Python | tests/integration/states/test_nsxt_manager.py | jain-prerna/salt-ext-modules-vmware-old | 89ea6dd77c6d5a35dc55c23adbdc361949a63057 | ["Apache-2.0"] | 1 | 2021-11-02T20:24:19.000Z | 2021-11-02T20:24:19.000Z | tests/integration/states/test_nsxt_manager.py | cmcmarrow/salt-ext-modules-vmware | c546a9f9ae121b7399dabae82f714117d0ab558d | ["Apache-2.0"] | null | null | null | tests/integration/states/test_nsxt_manager.py | cmcmarrow/salt-ext-modules-vmware | c546a9f9ae121b7399dabae82f714117d0ab558d | ["Apache-2.0"] | 1 | 2021-12-15T02:46:59.000Z | 2021-12-15T02:46:59.000Z |
"""
Integration Tests for nsxt_manager state module
"""
import logging
import pytest
import requests
from requests.auth import HTTPBasicAuth
log = logging.getLogger(__name__)
BASE_URL = "https://{}/api/v1/configs/management"
def _get_manager_config_from_nsxt(nsxt_config):
hostname = nsxt_config["hostname"]
username = nsxt_config["username"]
password = nsxt_config["password"]
cert = nsxt_config.get("cert", False)
return requests.get(
url=BASE_URL.format(hostname), auth=HTTPBasicAuth(username, password), verify=cert
).json()
def _set_manager_config_to_nsxt(nsxt_config, data):
hostname = nsxt_config["hostname"]
username = nsxt_config["username"]
password = nsxt_config["password"]
cert = nsxt_config.get("cert", False)
return requests.put(
url=BASE_URL.format(hostname),
auth=HTTPBasicAuth(username, password),
verify=cert,
data=data,
headers={"content-type": "application/json"},
).json()
@pytest.fixture
def publish_fqdns(nsxt_config):
# get current config
current_manager_config = _get_manager_config_from_nsxt(nsxt_config)
publish_fqdns = current_manager_config["publish_fqdns"]
log.info("Initial publish_fqdns value %s", publish_fqdns)
# yield the current publish_fqdns
yield publish_fqdns
# get current config for latest revision number after tests ran
current_manager_config = _get_manager_config_from_nsxt(nsxt_config)
current_manager_config["publish_fqdns"] = publish_fqdns
log.info("Final publish_fqdns value %s", publish_fqdns)
# restore the config state to original
_set_manager_config_to_nsxt(nsxt_config, current_manager_config)
def test_nsxt_manager(nsxt_config, salt_call_cli, publish_fqdns):
"""
Tests NSX-T Manager State module to verify publish_fqdns_enabled/publish_fqdns_disabled
when it is enabled/disabled in NSX-T Manager
"""
if publish_fqdns:
changes, comment = _execute_publish_fqdns_enabled(nsxt_config, salt_call_cli)
assert not changes
assert comment == "publish_fqdns is already set to True"
changes, comment = _execute_publish_fqdns_disabled(nsxt_config, salt_call_cli)
assert dict(changes)["new"]["publish_fqdns"] is False
assert dict(changes)["old"]["publish_fqdns"] is True
assert comment == "publish_fqdns has been set to False"
changes, comment = _execute_publish_fqdns_disabled(nsxt_config, salt_call_cli)
assert not changes
assert comment == "publish_fqdns is already set to False"
changes, comment = _execute_publish_fqdns_enabled(nsxt_config, salt_call_cli)
assert dict(changes)["new"]["publish_fqdns"] is True
assert dict(changes)["old"]["publish_fqdns"] is False
assert comment == "publish_fqdns has been set to True"
else:
changes, comment = _execute_publish_fqdns_disabled(nsxt_config, salt_call_cli)
assert not changes
assert comment == "publish_fqdns is already set to False"
changes, comment = _execute_publish_fqdns_enabled(nsxt_config, salt_call_cli)
assert dict(changes)["new"]["publish_fqdns"] is True
assert dict(changes)["old"]["publish_fqdns"] is False
assert comment == "publish_fqdns has been set to True"
changes, comment = _execute_publish_fqdns_enabled(nsxt_config, salt_call_cli)
assert not changes
assert comment == "publish_fqdns is already set to True"
changes, comment = _execute_publish_fqdns_disabled(nsxt_config, salt_call_cli)
assert dict(changes)["new"]["publish_fqdns"] is False
assert dict(changes)["old"]["publish_fqdns"] is True
assert comment == "publish_fqdns has been set to False"
def _execute_publish_fqdns_enabled(nsxt_config, salt_call_cli):
hostname = nsxt_config["hostname"]
username = nsxt_config["username"]
password = nsxt_config["password"]
response = salt_call_cli.run(
"state.single",
"nsxt_manager.publish_fqdns_enabled",
name="publish_fqdns_enabled",
hostname=hostname,
username=username,
password=password,
verify_ssl=False,
).json
result = dict(list(response.values())[0])
return result.get("changes"), result.get("comment")
def _execute_publish_fqdns_disabled(nsxt_config, salt_call_cli):
hostname = nsxt_config["hostname"]
username = nsxt_config["username"]
password = nsxt_config["password"]
response = salt_call_cli.run(
"state.single",
"nsxt_manager.publish_fqdns_disabled",
name="publish_fqdns_disabled",
hostname=hostname,
username=username,
password=password,
verify_ssl=False,
).json
result = dict(list(response.values())[0])
return result.get("changes"), result.get("comment")
| 35.093525 | 91 | 0.708282 | 611 | 4,878 | 5.348609 | 0.157119 | 0.165239 | 0.043758 | 0.060588 | 0.787638 | 0.767748 | 0.737148 | 0.70716 | 0.70716 | 0.690942 | 0 | 0.000766 | 0.196802 | 4,878 | 138 | 92 | 35.347826 | 0.833333 | 0.067856 | 0 | 0.659794 | 0 | 0 | 0.18347 | 0.024817 | 0 | 0 | 0 | 0 | 0.206186 | 1 | 0.061856 | false | 0.082474 | 0.041237 | 0 | 0.14433 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 6 |
| 1c48a04f443135c1a234bcf0ad9484b86d1bdd54 | 82 | py | Python | src/westpa/westext/adaptvoronoi/__init__.py | burntyellow/adelman_ci | cca251a51b34843faed0275cce01d7a307829993 | ["MIT"] | 140 | 2015-01-07T23:30:36.000Z | 2022-03-28T17:15:30.000Z | src/westext/adaptvoronoi/__init__.py | burntyellow/westpa | 9dc62478fcef0001b9c038cd56a40b6be1b9d64a | ["MIT"] | 157 | 2015-01-03T03:38:36.000Z | 2022-03-31T14:12:16.000Z | src/westext/adaptvoronoi/__init__.py | burntyellow/westpa | 9dc62478fcef0001b9c038cd56a40b6be1b9d64a | ["MIT"] | 56 | 2015-01-02T21:21:40.000Z | 2022-03-03T16:27:54.000Z |
from . import adaptVor_driver
from .adaptVor_driver import AdaptiveVoronoiDriver
| 20.5 | 50 | 0.865854 | 9 | 82 | 7.666667 | 0.555556 | 0.405797 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.109756 | 82 | 3 | 51 | 27.333333 | 0.945205 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
| 1c6aa28701fa0ede0cd386731b68432d24ef2a89 | 14,474 | py | Python | tests/test_sphinx/test_sphinx_builds.py | daobook/myst-parser | 153ef318cd6f2c52abce7844218e95f18a12653b | ["MIT"] | null | null | null | tests/test_sphinx/test_sphinx_builds.py | daobook/myst-parser | 153ef318cd6f2c52abce7844218e95f18a12653b | ["MIT"] | null | null | null | tests/test_sphinx/test_sphinx_builds.py | daobook/myst-parser | 153ef318cd6f2c52abce7844218e95f18a12653b | ["MIT"] | null | null | null |
"""Uses sphinx's pytest fixture to run builds.
see conftest.py for fixture usage
NOTE: sphinx 3 & 4 regress against different output files,
the major difference being sphinx 4 uses docutils 0.17,
which uses semantic HTML tags
(e.g. converting `<div class="section">` to `<section>`)
"""
import os
import re
import pytest
import sphinx
from docutils import VersionInfo, __version_info__
SOURCE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "sourcedirs"))
@pytest.mark.sphinx(
buildername="html", srcdir=os.path.join(SOURCE_DIR, "basic"), freshenv=True
)
def test_basic(
app,
status,
warning,
get_sphinx_app_doctree,
get_sphinx_app_output,
remove_sphinx_builds,
):
"""basic test."""
app.build()
assert "build succeeded" in status.getvalue() # Build succeeded
warnings = warning.getvalue().strip()
assert warnings == ""
get_sphinx_app_doctree(
app,
docname="content",
regress=True,
regress_ext=f".sphinx{sphinx.version_info[0]}.xml",
)
get_sphinx_app_doctree(
app,
docname="content",
resolve=True,
regress=True,
regress_ext=f".sphinx{sphinx.version_info[0]}.xml",
)
get_sphinx_app_output(
app,
filename="content.html",
regress_html=True,
regress_ext=f".sphinx{sphinx.version_info[0]}.html",
)
assert app.env.metadata["content"] == {
"author": "Chris Sewell",
"authors": ["Chris Sewell", "Chris Hodgraf"],
"organization": "EPFL",
"address": "1 Cedar Park Close\nThundersley\nEssex\n",
"contact": "https://example.com",
"version": "1.0",
"revision": "1.1",
"status": "good",
"date": "2/12/1985",
"copyright": "MIT",
"other": "Something else",
"wordcount": {"minutes": 0, "words": 57},
}
@pytest.mark.sphinx(
buildername="html",
srcdir=os.path.join(SOURCE_DIR, "references"),
freshenv=True,
)
def test_references(
app,
status,
warning,
get_sphinx_app_doctree,
get_sphinx_app_output,
remove_sphinx_builds,
):
"""Test reference resolution."""
app.build()
assert "build succeeded" in status.getvalue() # Build succeeded
warnings = warning.getvalue().strip()
assert warnings == ""
try:
get_sphinx_app_doctree(app, docname="index", regress=True)
finally:
get_sphinx_app_doctree(app, docname="index", resolve=True, regress=True)
get_sphinx_app_output(
app,
filename="index.html",
regress_html=True,
regress_ext=f".sphinx{sphinx.version_info[0]}.html",
)
@pytest.mark.sphinx(
buildername="singlehtml",
srcdir=os.path.join(SOURCE_DIR, "references_singlehtml"),
freshenv=True,
confoverrides={"nitpicky": True},
)
def test_references_singlehtml(
app,
status,
warning,
get_sphinx_app_doctree,
get_sphinx_app_output,
remove_sphinx_builds,
):
"""Test reference resolution for singlehtml builds."""
app.build()
assert "build succeeded" in status.getvalue() # Build succeeded
warnings = warning.getvalue().strip()
assert warnings == ""
# try:
# get_sphinx_app_doctree(app, docname="index", regress=True)
# finally:
# get_sphinx_app_doctree(app, docname="index", resolve=True, regress=True)
try:
get_sphinx_app_doctree(
app,
docname="other/other",
regress=True,
replace={"other\\other.md": "other/other.md"},
)
finally:
get_sphinx_app_doctree(
app,
docname="other/other",
resolve=True,
regress=True,
replace={"other\\other.md": "other/other.md"},
)
get_sphinx_app_output(
app,
filename="index.html",
buildername="singlehtml",
regress_html=True,
regress_ext=f".sphinx{sphinx.version_info[0]}.html",
)
@pytest.mark.sphinx(
buildername="html",
srcdir=os.path.join(SOURCE_DIR, "heading_slug_func"),
freshenv=True,
)
def test_heading_slug_func(
app,
status,
warning,
get_sphinx_app_doctree,
get_sphinx_app_output,
remove_sphinx_builds,
):
"""Test heading_slug_func configuration."""
app.build()
assert "build succeeded" in status.getvalue() # Build succeeded
warnings = warning.getvalue().strip()
assert warnings == ""
try:
get_sphinx_app_doctree(app, docname="index", regress=True)
finally:
get_sphinx_app_doctree(app, docname="index", resolve=True, regress=True)
get_sphinx_app_output(
app,
filename="index.html",
regress_html=True,
regress_ext=f".sphinx{sphinx.version_info[0]}.html",
)
@pytest.mark.sphinx(
buildername="html",
srcdir=os.path.join(SOURCE_DIR, "extended_syntaxes"),
freshenv=True,
)
def test_extended_syntaxes(
app,
status,
warning,
get_sphinx_app_doctree,
get_sphinx_app_output,
remove_sphinx_builds,
monkeypatch,
):
"""test setting addition configuration values."""
from myst_parser.sphinx_renderer import SphinxRenderer
monkeypatch.setattr(SphinxRenderer, "_random_label", lambda self: "mock-uuid")
app.build()
assert "build succeeded" in status.getvalue() # Build succeeded
warnings = warning.getvalue().strip()
assert warnings == ""
try:
get_sphinx_app_doctree(
app,
docname="index",
regress=True,
regress_ext=f".sphinx{sphinx.version_info[0]}.xml",
)
finally:
get_sphinx_app_output(
app,
filename="index.html",
regress_html=True,
regress_ext=f".sphinx{sphinx.version_info[0]}.html",
)
@pytest.mark.sphinx(
buildername="html", srcdir=os.path.join(SOURCE_DIR, "includes"), freshenv=True
)
def test_includes(
app,
status,
warning,
get_sphinx_app_doctree,
get_sphinx_app_output,
remove_sphinx_builds,
):
"""Test of include directive."""
app.build()
assert "build succeeded" in status.getvalue() # Build succeeded
warnings = warning.getvalue().strip()
assert warnings == ""
try:
get_sphinx_app_doctree(
app,
docname="index",
regress=True,
regress_ext=f".sphinx{sphinx.version_info[0]}.xml",
# normalize Windows path separators (fix for Windows CI)
replace={
r"subfolder\example2.jpg": "subfolder/example2.jpg",
r"subfolder\\example2.jpg": "subfolder/example2.jpg",
r"subfolder\\\\example2.jpg": "subfolder/example2.jpg",
},
)
finally:
get_sphinx_app_output(
app,
filename="index.html",
regress_html=True,
regress_ext=f".sphinx{sphinx.version_info[0]}.html",
replace={
r"'subfolder\\example2'": "'subfolder/example2'",
r'uri="subfolder\\example2"': 'uri="subfolder/example2"',
"_images/example21.jpg": "_images/example2.jpg",
},
)
@pytest.mark.skipif(
__version_info__ < VersionInfo(0, 17, 0, "final", 0, True),
reason="parser option added in docutils 0.17",
)
@pytest.mark.sphinx(
buildername="html",
srcdir=os.path.join(SOURCE_DIR, "include_from_rst"),
freshenv=True,
)
def test_include_from_rst(
app,
status,
warning,
get_sphinx_app_doctree,
get_sphinx_app_output,
remove_sphinx_builds,
):
"""Test of include directive inside RST file."""
app.build()
assert "build succeeded" in status.getvalue() # Build succeeded
warnings = warning.getvalue().strip()
assert warnings == ""
get_sphinx_app_doctree(
app,
docname="index",
regress=True,
regress_ext=".xml",
)
@pytest.mark.sphinx(
buildername="html", srcdir=os.path.join(SOURCE_DIR, "footnotes"), freshenv=True
)
def test_footnotes(
app,
status,
warning,
get_sphinx_app_doctree,
get_sphinx_app_output,
remove_sphinx_builds,
):
"""Test of include directive."""
app.build()
assert "build succeeded" in status.getvalue() # Build succeeded
warnings = warning.getvalue().strip()
assert warnings == ""
try:
get_sphinx_app_doctree(app, docname="footnote_md", regress=True)
finally:
get_sphinx_app_output(
app,
filename="footnote_md.html",
regress_html=True,
regress_ext=f".sphinx{sphinx.version_info[0]}.html",
)
@pytest.mark.sphinx(
buildername="html",
srcdir=os.path.join(SOURCE_DIR, "commonmark_only"),
freshenv=True,
)
def test_commonmark_only(
app,
status,
warning,
get_sphinx_app_doctree,
get_sphinx_app_output,
remove_sphinx_builds,
):
"""test setting addition configuration values."""
app.build()
assert "build succeeded" in status.getvalue() # Build succeeded
warnings = warning.getvalue().strip()
assert "lexer name '{note}'" in warnings
try:
get_sphinx_app_doctree(app, docname="index", regress=True)
finally:
get_sphinx_app_output(
app,
filename="index.html",
regress_html=True,
regress_ext=f".sphinx{sphinx.version_info[0]}.html",
)
@pytest.mark.sphinx(
buildername="html",
srcdir=os.path.join(SOURCE_DIR, "substitutions"),
freshenv=True,
)
def test_substitutions(
app,
status,
warning,
get_sphinx_app_doctree,
get_sphinx_app_output,
remove_sphinx_builds,
file_regression,
):
"""test setting addition configuration values."""
app.build()
assert "build succeeded" in status.getvalue() # Build succeeded
warnings = warning.getvalue().strip()
assert warnings == ""
try:
get_sphinx_app_doctree(app, docname="index", regress=True)
file_regression.check(
get_sphinx_app_doctree(app, docname="other").pformat(),
extension=".other.xml",
)
finally:
get_sphinx_app_output(app, filename="index.html", regress_html=True)
@pytest.mark.sphinx(
buildername="gettext", srcdir=os.path.join(SOURCE_DIR, "gettext"), freshenv=True
)
def test_gettext(
app,
status,
warning,
get_sphinx_app_output,
remove_sphinx_builds,
file_regression,
):
"""Test gettext message extraction."""
app.build()
assert "build succeeded" in status.getvalue() # Build succeeded
warnings = warning.getvalue().strip()
assert warnings == ""
output = get_sphinx_app_output(app, filename="index.pot", buildername="gettext")
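# scrub volatile header fields (creation date, copyright year) so the .pot
# regression fixture stays stable between runs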
output = re.sub(r"POT-Creation-Date: [0-9: +-]+", "POT-Creation-Date: ", output)
output = re.sub(r"Copyright \(C\) [0-9]{4}", "Copyright (C) XXXX", output)
file_regression.check(output, extension=f".sphinx{sphinx.version_info[0]}.pot")
@pytest.mark.sphinx(
buildername="html",
srcdir=os.path.join(SOURCE_DIR, "gettext"),
freshenv=True,
confoverrides={"language": "fr", "gettext_compact": False, "locale_dirs": ["."]},
)
def test_gettext_html(
app,
status,
warning,
get_sphinx_app_doctree,
get_sphinx_app_output,
remove_sphinx_builds,
):
"""Test gettext message extraction."""
app.build()
assert "build succeeded" in status.getvalue() # Build succeeded
warnings = warning.getvalue().strip()
assert warnings == ""
try:
get_sphinx_app_doctree(
app,
docname="index",
regress=True,
regress_ext=f".sphinx{sphinx.version_info[0]}.xml",
)
finally:
get_sphinx_app_doctree(
app,
docname="index",
resolve=True,
regress=True,
regress_ext=f".sphinx{sphinx.version_info[0]}.xml",
)
get_sphinx_app_output(
app,
filename="index.html",
regress_html=True,
regress_ext=f".sphinx{sphinx.version_info[0]}.html",
)
@pytest.mark.sphinx(
buildername="gettext",
srcdir=os.path.join(SOURCE_DIR, "gettext"),
freshenv=True,
confoverrides={
"gettext_additional_targets": [
"index",
"literal-block",
"doctest-block",
"raw",
"image",
],
},
)
def test_gettext_additional_targets(
app,
status,
warning,
get_sphinx_app_output,
remove_sphinx_builds,
file_regression,
):
"""Test gettext message extraction."""
app.build()
assert "build succeeded" in status.getvalue() # Build succeeded
warnings = warning.getvalue().strip()
assert warnings == ""
output = get_sphinx_app_output(app, filename="index.pot", buildername="gettext")
output = re.sub(r"POT-Creation-Date: [0-9: +-]+", "POT-Creation-Date: ", output)
output = re.sub(r"Copyright \(C\) [0-9]{4}", "Copyright (C) XXXX", output)
file_regression.check(output, extension=f".sphinx{sphinx.version_info[0]}.pot")
@pytest.mark.sphinx(
buildername="html", srcdir=os.path.join(SOURCE_DIR, "mathjax"), freshenv=True
)
def test_mathjax_warning(
app,
status,
warning,
remove_sphinx_builds,
):
"""Test mathjax config override warning."""
app.build()
assert "build succeeded" in status.getvalue() # Build succeeded
warnings = warning.getvalue().strip()
assert (
"overridden by myst-parser: 'other' -> 'tex2jax_process|mathjax_process|math|output_area'"
in warnings
)
@pytest.mark.sphinx(
buildername="html",
srcdir=os.path.join(SOURCE_DIR, "fieldlist"),
freshenv=True,
)
def test_fieldlist_extension(
app,
status,
warning,
get_sphinx_app_doctree,
get_sphinx_app_output,
remove_sphinx_builds,
):
"""test setting addition configuration values."""
app.build()
assert "build succeeded" in status.getvalue() # Build succeeded
warnings = warning.getvalue().strip()
assert warnings == ""
try:
get_sphinx_app_doctree(
app,
docname="index",
regress=True,
regress_ext=f".sphinx{sphinx.version_info[0]}.xml",
)
finally:
get_sphinx_app_output(
app,
filename="index.html",
regress_html=True,
regress_ext=f".sphinx{sphinx.version_info[0]}.html",
)
| 26.412409
| 98
| 0.624361
| 1,639
| 14,474
| 5.305064
| 0.129957
| 0.06107
| 0.081426
| 0.069925
| 0.772053
| 0.767683
| 0.767683
| 0.753882
| 0.740541
| 0.725474
| 0
| 0.006606
| 0.246926
| 14,474
| 547
| 99
| 26.460695
| 0.791101
| 0.085049
| 0
| 0.729847
| 0
| 0
| 0.187414
| 0.07807
| 0
| 0
| 0
| 0
| 0.067538
| 1
| 0.03268
| false
| 0
| 0.013072
| 0
| 0.045752
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 6
| 1c75fc5aca41bac52b915f9a38b00c9f2fceb3fa | 7,907 | py | Python | xooa_api/blockchain.py | Xooa/xooa-python-sdk | b364a576b324d1dfaaa383586c38586d855ed95f | ["Apache-2.0"] | 6 | 2019-07-19T19:45:45.000Z | 2021-12-16T13:18:01.000Z | xooa_api/blockchain.py | Xooa/xooa-python-sdk | b364a576b324d1dfaaa383586c38586d855ed95f | ["Apache-2.0"] | null | null | null | xooa_api/blockchain.py | Xooa/xooa-python-sdk | b364a576b324d1dfaaa383586c38586d855ed95f | ["Apache-2.0"] | null | null | null |
#
# Python SDK for Xooa
#
# Copyright 2018 Xooa
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License
# for the specific language governing permissions and limitations under the License.
#
# Author: Rahul Kamboj
#
# coding: utf-8
from __future__ import absolute_import
# python 2 and python 3 compatibility library
import six
import json
from .xooa_exceptions import XooaApiException, XooaRequestTimeoutException
import requests
class BlockchainApi(object):
""" Block chain API class to create requests to block chain API."""
def block_data(self, xooa_client, block_number, timeout, **kwargs):
""" Call the BlockByNumber api
:param xooa_client: Includes Headers and URL to make request
:param int block_number: Block number to fetch data (required)
:param timeout: optional request timeout, forwarded as the 'timeout' query parameter
:param kwargs: query arguments for the API, including the optional 'asyncKey'
:return:
"""
logger = xooa_client.xooa_logger
try:
req_params = xooa_client.req_params
logger.info('Calling Block Chain API for Block By Number')
params = locals()
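# lift the optional 'asyncKey' keyword out of **kwargs; it is mapped to the
# 'async' query parameter below (defaulting to 'false')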
for key, val in six.iteritems(params['kwargs']):
if key == 'asyncKey':
params[key] = val
del params['kwargs']
# verify the required parameter 'block_number' is set
if block_number is None:
logger.error("Missing the required parameter `block_number` when calling `block_data`")
raise ValueError("Missing the required parameter `block_number` when calling `block_data`")
query_params = {}
if 'asyncKey' in params:
query_params['async'] = params['asyncKey']
else:
query_params['async'] = 'false'
if timeout is not None:
query_params['timeout'] = timeout
url_suffix = '/block/' + str(block_number)
url = req_params['base_url'] + url_suffix
headers = req_params['headers']
logger.info("Sending request to get Block By Number...")
response = requests.get(url, params=query_params, headers=headers)
response_object = json.loads(response.text)
if query_params['async'] == 'true':
if response.status_code == 202:
return response_object
else:
raise XooaApiException(response.status_code, response_object)
else:
if response.status_code == 200:
return response_object
elif response.status_code == 202:
raise XooaRequestTimeoutException(response_object['resultId'], response_object['resultURL'])
else:
raise XooaApiException(response.status_code, response_object)
except XooaApiException:
raise
except XooaRequestTimeoutException:
raise
except Exception:
raise XooaApiException("0", "Exception in GetBlockByNumber")
def block_height(self, xooa_client, timeout, **kwargs):
""" Call the CurrentBlock api
:param xooa_client: Includes Headers and URL to make request
:param timeout: optional request timeout, forwarded as the 'timeout' query parameter
:param kwargs: query arguments for the API, including the optional 'asyncKey'
:return:
"""
logger = xooa_client.xooa_logger
try:
req_params = xooa_client.req_params
logger.info('Calling Block Chain API for Block Height.')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key == "asyncKey":
params[key] = val
del params['kwargs']
query_params = {}
if 'asyncKey' in params:
query_params['async'] = params['asyncKey']
else:
query_params['async'] = 'false'
if timeout is not None:
query_params['timeout'] = timeout
url_suffix = '/block/current'
url = req_params['base_url'] + url_suffix
headers = req_params['headers']
logger.info("Requesting blockchain height...")
response = requests.get(url, params=query_params, headers=headers)
response_object = json.loads(response.text)
if query_params['async'] == 'true':
if response.status_code == 202:
return response_object
else:
raise XooaApiException(response.status_code, response_object)
else:
if response.status_code == 200:
return response_object
elif response.status_code == 202:
raise XooaRequestTimeoutException(response_object['resultId'], response_object['resultURL'])
else:
raise XooaApiException(response.status_code, response_object)
except XooaApiException:
raise
except XooaRequestTimeoutException:
raise
except Exception:
raise XooaApiException("0", "Exception in GetBlockHeight")
def get_transaction_by_transaction_id(self, xooa_client, transaction_id, timeout, **kwargs):
""" Call the Transaction api
:param xooa_client: Includes Headers and URL to make request
:param transaction_id: Transaction Id to get transaction details
:param timeout: optional request timeout, forwarded as the 'timeout' query parameter
:param kwargs: query arguments for the API, including the optional 'asyncKey'
:return:
"""
logger = xooa_client.xooa_logger
try:
req_params = xooa_client.req_params
logger.info('Calling Block Chain API for Transaction.')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key == 'asyncKey':
params[key] = val
del params['kwargs']
query_params = {}
if 'asyncKey' in params:
query_params['async'] = params['asyncKey']
else:
query_params['async'] = 'false'
if timeout is not None:
query_params['timeout'] = timeout
url_suffix = '/transactions/' + str(transaction_id)
url = req_params['base_url'] + url_suffix
headers = req_params['headers']
logger.info("Requesting Transaction details...")
response = requests.get(url, params=query_params, headers=headers)
response_object = json.loads(response.text)
if query_params['async'] == 'true':
if response.status_code == 202:
return response_object
else:
raise XooaApiException(response.status_code, response_object)
else:
if response.status_code == 200:
return response_object
elif response.status_code == 202:
raise XooaRequestTimeoutException(response_object['resultId'], response_object['resultURL'])
else:
raise XooaApiException(response.status_code, response_object)
except XooaApiException:
raise
except XooaRequestTimeoutException:
raise
except Exception:
raise XooaApiException("0", "Exception in GetBlockHeight")
| 30.411538
| 114
| 0.590995
| 811
| 7,907
| 5.621455
| 0.202219
| 0.064488
| 0.059224
| 0.026322
| 0.727791
| 0.720991
| 0.720991
| 0.720991
| 0.720991
| 0.720991
| 0
| 0.007746
| 0.330593
| 7,907
| 259
| 115
| 30.528958
| 0.85358
| 0.181105
| 0
| 0.825758
| 0
| 0
| 0.125377
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022727
| false
| 0
| 0.037879
| 0
| 0.113636
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 6
| 98cb7bb3897004160678d1591aa278dd6694ffbb | 46 | py | Python | src/opensignals/__init__.py | jconsidi/opensignals | 5a3158fc55cd1c50719e6f4c77d47ee1823b4e77 | ["Apache-2.0"] | 25 | 2021-06-28T14:54:27.000Z | 2022-03-22T19:03:00.000Z | src/opensignals/__init__.py | jconsidi/opensignals | 5a3158fc55cd1c50719e6f4c77d47ee1823b4e77 | ["Apache-2.0"] | 21 | 2021-06-30T02:52:18.000Z | 2022-01-31T08:23:47.000Z | src/opensignals/__init__.py | jconsidi/opensignals | 5a3158fc55cd1c50719e6f4c77d47ee1823b4e77 | ["Apache-2.0"] | 13 | 2021-06-30T02:52:54.000Z | 2022-02-27T17:14:04.000Z |
from opensignals.__about__ import __version__
| 23
| 45
| 0.891304
| 5
| 46
| 6.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086957
| 46
| 1
| 46
| 46
| 0.785714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 6
| c71071bb219f90b6b7fa20e7889030cdadcae16c | 28,665 | py | Python | mks/migrations/0009_add_action_stream.py | daonb/Open-Knesset | a169f03965b36f753968a18c7c8165156c0cebdd | ["BSD-3-Clause"] | 69 | 2015-02-03T12:02:56.000Z | 2022-02-16T13:08:01.000Z | mks/migrations/0009_add_action_stream.py | OriHoch/Open-Knesset | 538bcdc2632d8d17a8ddddbc4567106684b9996b | ["BSD-3-Clause"] | 446 | 2015-01-01T11:10:33.000Z | 2021-11-01T08:15:39.000Z | mks/migrations/0009_add_action_stream.py | OriHoch/Open-Knesset | 538bcdc2632d8d17a8ddddbc4567106684b9996b | ["BSD-3-Clause"] | 67 | 2015-01-01T09:13:58.000Z | 2021-11-01T07:51:08.000Z |
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from django.utils.encoding import force_unicode
class Migration(DataMigration):
def _get_FIELD_display(self, field):
value = getattr(self, field.attname)
return force_unicode(dict(field.flatchoices).get(value, value), strings_only=True)
depends_on = (
('committees', '0001_startapp_committees'),
('laws', '0001_initial'),
)
def forwards(self, orm):
from actstream import action
print 'adding committee actions'
for c in orm['committees.CommitteeMeeting'].objects.all():
for m in c.mks_attended.all():
action.send(m, verb='attended', target=c,
description='committee meeting', timestamp=c.date)
print 'adding posts actions'
for f in orm['planet.Feed'].objects.all():
member = orm.Member.objects.get(pk=orm['links.Link'].objects.get(url=f.url).object_pk)
for p in f.post_set.all():
action.send(member, verb='posted', target=p, timestamp=p.date_modified or p.date_created)
print 'adding votes actions (may take a while)'
from laws.enums import VOTE_ACTION_TYPE_CHOICES
choice_dict = dict(VOTE_ACTION_TYPE_CHOICES)
for instance in orm['laws.VoteAction'].objects.all():
action.send(instance.member, verb='voted',
description=unicode(choice_dict[instance.type]),
target=instance.vote,
timestamp=instance.vote.time)
def backwards(self, orm):
"Write your backwards methods here."
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'committees.committee': {
'Meta': {'object_name': 'Committee'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'committees'", 'symmetrical': 'False', 'to': "orm['mks.Member']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'})
},
'committees.committeemeeting': {
'Meta': {'object_name': 'CommitteeMeeting'},
'committee': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['committees.Committee']"}),
'date': ('django.db.models.fields.DateField', [], {}),
'date_string': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mks_attended': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'committee_meetings'", 'symmetrical': 'False', 'to': "orm['mks.Member']"}),
'protocol_text': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'topics': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'votes_mentioned': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'committee_meetings'", 'blank': 'True', 'to': "orm['laws.Vote']"})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'laws.knessetproposal': {
'Meta': {'object_name': 'KnessetProposal'},
'booklet_number': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'committee': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'bills'", 'null': 'True', 'to': "orm['committees.Committee']"}),
'committee_meetings': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'laws_knessetproposal_related'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['committees.CommitteeMeeting']"}),
'date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'knesset_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'law': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'laws_knessetproposal_related'", 'null': 'True', 'to': "orm['laws.Law']"}),
'originals': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'knesset_proposals'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['laws.PrivateProposal']"}),
'source_url': ('django.db.models.fields.URLField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'votes': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'laws_knessetproposal_related'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['laws.Vote']"})
},
'laws.law': {
'Meta': {'object_name': 'Law'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '1000'})
},
'laws.membervotingstatistics': {
'Meta': {'object_name': 'MemberVotingStatistics'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'member': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'voting_statistics'", 'unique': 'True', 'to': "orm['mks.Member']"})
},
'laws.partyvotingstatistics': {
'Meta': {'object_name': 'PartyVotingStatistics'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'party': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'voting_statistics'", 'unique': 'True', 'to': "orm['mks.Party']"})
},
'laws.privateproposal': {
'Meta': {'object_name': 'PrivateProposal'},
'committee_meetings': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'laws_privateproposal_related'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['committees.CommitteeMeeting']"}),
'date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'joiners': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'bills_joined'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['mks.Member']"}),
'knesset_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'law': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'laws_privateproposal_related'", 'null': 'True', 'to': "orm['laws.Law']"}),
'proposal_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'proposers': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'bills'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['mks.Member']"}),
'source_url': ('django.db.models.fields.URLField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'votes': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'laws_privateproposal_related'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['laws.Vote']"})
},
'laws.vote': {
'Meta': {'object_name': 'Vote'},
'against_party': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'controversy': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'full_text': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'full_text_url': ('django.db.models.fields.URLField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'importance': ('django.db.models.fields.FloatField', [], {}),
'meeting_number': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'src_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'src_url': ('django.db.models.fields.URLField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'summary': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'time': ('django.db.models.fields.DateTimeField', [], {}),
'time_string': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'vote_number': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'votes': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'votes'", 'blank': 'True', 'through': "orm['laws.VoteAction']", 'to': "orm['mks.Member']"}),
'votes_count': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
'laws.voteaction': {
'Meta': {'object_name': 'VoteAction'},
'against_coalition': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'against_opposition': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'against_party': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'member': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mks.Member']"}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'vote': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['laws.Vote']"})
},
'links.link': {
'Meta': {'object_name': 'Link'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'content_type_set_for_link'", 'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'link_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['links.LinkType']", 'null': 'True', 'blank': 'True'}),
'object_pk': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'links.linktype': {
'Meta': {'object_name': 'LinkType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'mks.correlation': {
'Meta': {'object_name': 'Correlation'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'm1': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'m1'", 'to': "orm['mks.Member']"}),
'm2': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'m2'", 'to': "orm['mks.Member']"}),
'normalized_score': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'not_same_party': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'mks.member': {
'Meta': {'object_name': 'Member'},
'area_of_residence': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'blog': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['planet.Blog']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'current_party': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'members'", 'null': 'True', 'to': "orm['mks.Party']"}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'date_of_death': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'family_status': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'fax': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'img_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'is_current': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'number_of_children': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'parties': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'all_members'", 'symmetrical': 'False', 'through': "orm['mks.Membership']", 'to': "orm['mks.Party']"}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'place_of_birth': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'place_of_residence': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'start_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'year_of_aliyah': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
'mks.membership': {
'Meta': {'object_name': 'Membership'},
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'member': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mks.Member']"}),
'party': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mks.Party']"}),
'start_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'})
},
'mks.party': {
'Meta': {'object_name': 'Party'},
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_coalition': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'number_of_members': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'number_of_seats': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'start_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'})
},
'mks.weeklypresence': {
'Meta': {'object_name': 'WeeklyPresence'},
'date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'hours': ('django.db.models.fields.FloatField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'member': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mks.Member']"})
},
'planet.author': {
'Meta': {'object_name': 'Author'},
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'profile_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
'planet.blog': {
'Meta': {'object_name': 'Blog'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '200', 'db_index': 'True'})
},
'planet.enclosure': {
'Meta': {'object_name': 'Enclosure'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'length': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'link': ('django.db.models.fields.URLField', [], {'max_length': '500', 'db_index': 'True'}),
'mime_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'post': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['planet.Post']"})
},
'planet.feed': {
'Meta': {'object_name': 'Feed'},
'blog': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['planet.Blog']", 'null': 'True', 'blank': 'True'}),
'etag': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'generator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['planet.Generator']", 'null': 'True', 'blank': 'True'}),
'guid': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'icon_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'info': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True', 'blank': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'last_checked': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'rights': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']", 'null': 'True', 'blank': 'True'}),
'subtitle': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '200', 'db_index': 'True'})
},
'planet.feedlink': {
'Meta': {'object_name': 'FeedLink'},
'feed': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['planet.Feed']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'link': ('django.db.models.fields.URLField', [], {'max_length': '500', 'db_index': 'True'}),
'mime_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'rel': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
},
'planet.generator': {
'Meta': {'unique_together': "(('name', 'link', 'version'),)", 'object_name': 'Generator'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '5', 'null': 'True', 'blank': 'True'})
},
'planet.post': {
'Meta': {'unique_together': "(('feed', 'guid'),)", 'object_name': 'Post'},
'authors': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['planet.Author']", 'through': "orm['planet.PostAuthorData']", 'symmetrical': 'False'}),
'comments_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'content': ('django.db.models.fields.TextField', [], {}),
'date_created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_modified': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'feed': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['planet.Feed']"}),
'guid': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'db_index': 'True'})
},
'planet.postauthordata': {
'Meta': {'object_name': 'PostAuthorData'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['planet.Author']"}),
'date_created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_contributor': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'post': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['planet.Post']"})
},
'planet.postlink': {
'Meta': {'object_name': 'PostLink'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'link': ('django.db.models.fields.URLField', [], {'max_length': '500', 'db_index': 'True'}),
'mime_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'post': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['planet.Post']"}),
'rel': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})
},
'sites.site': {
'Meta': {'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'tagging.tag': {
'Meta': {'object_name': 'Tag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'})
},
'tagging.taggeditem': {
'Meta': {'unique_together': "(('tag', 'content_type', 'object_id'),)", 'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'items'", 'to': "orm['tagging.Tag']"})
}
}
complete_apps = ['planet', 'committees', 'laws', 'links', 'mks']
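# To apply this data migration (illustrative; assumes the standard South
# workflow for this Django 1.x project):
#
#   python manage.py migrate mks 0009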
| 80.294118
| 243
| 0.553811
| 2,945
| 28,665
| 5.276401
| 0.094737
| 0.108115
| 0.1883
| 0.269001
| 0.762018
| 0.734024
| 0.717421
| 0.700045
| 0.631379
| 0.515928
| 0
| 0.009253
| 0.193197
| 28,665
| 356
| 244
| 80.519663
| 0.662645
| 0.000523
| 0
| 0.207602
| 0
| 0
| 0.570651
| 0.293424
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.002924
| 0.020468
| null | null | 0.008772
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 6
| c791e66766588159e27d9948cc9cb108cb9edf98 | 144 | py | Python | world_capitals_bot/data_models/user_profile.py | gcatanese/worldCapitalsChatbot | 09c9d32d9ce6da3bba84cdd24f24539e0b215611 | ["Apache-2.0"] | null | null | null | world_capitals_bot/data_models/user_profile.py | gcatanese/worldCapitalsChatbot | 09c9d32d9ce6da3bba84cdd24f24539e0b215611 | ["Apache-2.0"] | null | null | null | world_capitals_bot/data_models/user_profile.py | gcatanese/worldCapitalsChatbot | 09c9d32d9ce6da3bba84cdd24f24539e0b215611 | ["Apache-2.0"] | null | null | null |
class UserProfile:
def __init__(self, name: str = None):
self.name = name
def __str__(self):
return f"name:{self.name}"
| 24
| 41
| 0.604167
| 19
| 144
| 4.157895
| 0.526316
| 0.303797
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.270833
| 144
| 6
| 42
| 24
| 0.752381
| 0
| 0
| 0
| 0
| 0
| 0.110345
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 6
| c7cfa30b84e6da4b9c34b221ad846f4f15a039d6 | 198,121 | py | Python | cottonformation/res/glue.py | gitter-badger/cottonformation-project | 354f1dce7ea106e209af2d5d818b6033a27c193c | ["BSD-2-Clause"] | null | null | null | cottonformation/res/glue.py | gitter-badger/cottonformation-project | 354f1dce7ea106e209af2d5d818b6033a27c193c | ["BSD-2-Clause"] | null | null | null | cottonformation/res/glue.py | gitter-badger/cottonformation-project | 354f1dce7ea106e209af2d5d818b6033a27c193c | ["BSD-2-Clause"] | null | null | null |
# -*- coding: utf-8 -*-
"""
This module declares AWS Glue resource and property classes for CloudFormation.
"""
import attr
import typing
from ..core.model import (
Property, Resource, Tag, GetAtt, TypeHint, TypeCheck,
)
from ..core.constant import AttrMeta
#--- Property declaration ---
@attr.s
class MLTransformFindMatchesParameters(Property):
"""
AWS Object Type = "AWS::Glue::MLTransform.FindMatchesParameters"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-transformparameters-findmatchesparameters.html
Property Document:
- ``rp_PrimaryKeyColumnName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-transformparameters-findmatchesparameters.html#cfn-glue-mltransform-transformparameters-findmatchesparameters-primarykeycolumnname
- ``p_AccuracyCostTradeoff``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-transformparameters-findmatchesparameters.html#cfn-glue-mltransform-transformparameters-findmatchesparameters-accuracycosttradeoff
- ``p_EnforceProvidedLabels``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-transformparameters-findmatchesparameters.html#cfn-glue-mltransform-transformparameters-findmatchesparameters-enforceprovidedlabels
- ``p_PrecisionRecallTradeoff``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-transformparameters-findmatchesparameters.html#cfn-glue-mltransform-transformparameters-findmatchesparameters-precisionrecalltradeoff
"""
AWS_OBJECT_TYPE = "AWS::Glue::MLTransform.FindMatchesParameters"
rp_PrimaryKeyColumnName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "PrimaryKeyColumnName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-transformparameters-findmatchesparameters.html#cfn-glue-mltransform-transformparameters-findmatchesparameters-primarykeycolumnname"""
p_AccuracyCostTradeoff: float = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(float)),
metadata={AttrMeta.PROPERTY_NAME: "AccuracyCostTradeoff"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-transformparameters-findmatchesparameters.html#cfn-glue-mltransform-transformparameters-findmatchesparameters-accuracycosttradeoff"""
p_EnforceProvidedLabels: bool = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(bool)),
metadata={AttrMeta.PROPERTY_NAME: "EnforceProvidedLabels"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-transformparameters-findmatchesparameters.html#cfn-glue-mltransform-transformparameters-findmatchesparameters-enforceprovidedlabels"""
p_PrecisionRecallTradeoff: float = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(float)),
metadata={AttrMeta.PROPERTY_NAME: "PrecisionRecallTradeoff"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-transformparameters-findmatchesparameters.html#cfn-glue-mltransform-transformparameters-findmatchesparameters-precisionrecalltradeoff"""
@attr.s
class JobJobCommand(Property):
"""
AWS Object Type = "AWS::Glue::Job.JobCommand"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-job-jobcommand.html
Property Document:
- ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-job-jobcommand.html#cfn-glue-job-jobcommand-name
- ``p_PythonVersion``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-job-jobcommand.html#cfn-glue-job-jobcommand-pythonversion
- ``p_ScriptLocation``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-job-jobcommand.html#cfn-glue-job-jobcommand-scriptlocation
"""
AWS_OBJECT_TYPE = "AWS::Glue::Job.JobCommand"
p_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-job-jobcommand.html#cfn-glue-job-jobcommand-name"""
p_PythonVersion: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "PythonVersion"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-job-jobcommand.html#cfn-glue-job-jobcommand-pythonversion"""
p_ScriptLocation: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "ScriptLocation"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-job-jobcommand.html#cfn-glue-job-jobcommand-scriptlocation"""
@attr.s
class CrawlerCatalogTarget(Property):
"""
AWS Object Type = "AWS::Glue::Crawler.CatalogTarget"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-catalogtarget.html
Property Document:
- ``p_DatabaseName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-catalogtarget.html#cfn-glue-crawler-catalogtarget-databasename
- ``p_Tables``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-catalogtarget.html#cfn-glue-crawler-catalogtarget-tables
"""
AWS_OBJECT_TYPE = "AWS::Glue::Crawler.CatalogTarget"
p_DatabaseName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "DatabaseName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-catalogtarget.html#cfn-glue-crawler-catalogtarget-databasename"""
p_Tables: typing.List[TypeHint.intrinsic_str] = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "Tables"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-catalogtarget.html#cfn-glue-crawler-catalogtarget-tables"""
@attr.s
class ConnectionPhysicalConnectionRequirements(Property):
"""
AWS Object Type = "AWS::Glue::Connection.PhysicalConnectionRequirements"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-connection-physicalconnectionrequirements.html
Property Document:
- ``p_AvailabilityZone``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-connection-physicalconnectionrequirements.html#cfn-glue-connection-physicalconnectionrequirements-availabilityzone
- ``p_SecurityGroupIdList``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-connection-physicalconnectionrequirements.html#cfn-glue-connection-physicalconnectionrequirements-securitygroupidlist
- ``p_SubnetId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-connection-physicalconnectionrequirements.html#cfn-glue-connection-physicalconnectionrequirements-subnetid
"""
AWS_OBJECT_TYPE = "AWS::Glue::Connection.PhysicalConnectionRequirements"
p_AvailabilityZone: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "AvailabilityZone"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-connection-physicalconnectionrequirements.html#cfn-glue-connection-physicalconnectionrequirements-availabilityzone"""
p_SecurityGroupIdList: typing.List[TypeHint.intrinsic_str] = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "SecurityGroupIdList"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-connection-physicalconnectionrequirements.html#cfn-glue-connection-physicalconnectionrequirements-securitygroupidlist"""
p_SubnetId: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "SubnetId"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-connection-physicalconnectionrequirements.html#cfn-glue-connection-physicalconnectionrequirements-subnetid"""
@attr.s
class SchemaVersionSchema(Property):
"""
AWS Object Type = "AWS::Glue::SchemaVersion.Schema"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-schemaversion-schema.html
Property Document:
- ``p_RegistryName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-schemaversion-schema.html#cfn-glue-schemaversion-schema-registryname
- ``p_SchemaArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-schemaversion-schema.html#cfn-glue-schemaversion-schema-schemaarn
- ``p_SchemaName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-schemaversion-schema.html#cfn-glue-schemaversion-schema-schemaname
"""
AWS_OBJECT_TYPE = "AWS::Glue::SchemaVersion.Schema"
p_RegistryName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "RegistryName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-schemaversion-schema.html#cfn-glue-schemaversion-schema-registryname"""
p_SchemaArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "SchemaArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-schemaversion-schema.html#cfn-glue-schemaversion-schema-schemaarn"""
p_SchemaName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "SchemaName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-schemaversion-schema.html#cfn-glue-schemaversion-schema-schemaname"""
@attr.s
class SchemaSchemaVersion(Property):
"""
AWS Object Type = "AWS::Glue::Schema.SchemaVersion"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-schema-schemaversion.html
Property Document:
- ``p_IsLatest``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-schema-schemaversion.html#cfn-glue-schema-schemaversion-islatest
- ``p_VersionNumber``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-schema-schemaversion.html#cfn-glue-schema-schemaversion-versionnumber
"""
AWS_OBJECT_TYPE = "AWS::Glue::Schema.SchemaVersion"
p_IsLatest: bool = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(bool)),
metadata={AttrMeta.PROPERTY_NAME: "IsLatest"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-schema-schemaversion.html#cfn-glue-schema-schemaversion-islatest"""
p_VersionNumber: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
metadata={AttrMeta.PROPERTY_NAME: "VersionNumber"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-schema-schemaversion.html#cfn-glue-schema-schemaversion-versionnumber"""
@attr.s
class CrawlerSchedule(Property):
"""
AWS Object Type = "AWS::Glue::Crawler.Schedule"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-schedule.html
Property Document:
- ``p_ScheduleExpression``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-schedule.html#cfn-glue-crawler-schedule-scheduleexpression
"""
AWS_OBJECT_TYPE = "AWS::Glue::Crawler.Schedule"
p_ScheduleExpression: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "ScheduleExpression"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-schedule.html#cfn-glue-crawler-schedule-scheduleexpression"""
@attr.s
class TriggerCondition(Property):
"""
AWS Object Type = "AWS::Glue::Trigger.Condition"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-trigger-condition.html
Property Document:
- ``p_CrawlState``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-trigger-condition.html#cfn-glue-trigger-condition-crawlstate
- ``p_CrawlerName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-trigger-condition.html#cfn-glue-trigger-condition-crawlername
- ``p_JobName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-trigger-condition.html#cfn-glue-trigger-condition-jobname
- ``p_LogicalOperator``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-trigger-condition.html#cfn-glue-trigger-condition-logicaloperator
- ``p_State``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-trigger-condition.html#cfn-glue-trigger-condition-state
"""
AWS_OBJECT_TYPE = "AWS::Glue::Trigger.Condition"
p_CrawlState: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "CrawlState"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-trigger-condition.html#cfn-glue-trigger-condition-crawlstate"""
p_CrawlerName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "CrawlerName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-trigger-condition.html#cfn-glue-trigger-condition-crawlername"""
p_JobName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "JobName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-trigger-condition.html#cfn-glue-trigger-condition-jobname"""
p_LogicalOperator: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "LogicalOperator"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-trigger-condition.html#cfn-glue-trigger-condition-logicaloperator"""
p_State: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "State"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-trigger-condition.html#cfn-glue-trigger-condition-state"""
@attr.s
class DatabaseDatabaseIdentifier(Property):
"""
AWS Object Type = "AWS::Glue::Database.DatabaseIdentifier"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-database-databaseidentifier.html
Property Document:
- ``p_CatalogId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-database-databaseidentifier.html#cfn-glue-database-databaseidentifier-catalogid
- ``p_DatabaseName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-database-databaseidentifier.html#cfn-glue-database-databaseidentifier-databasename
"""
AWS_OBJECT_TYPE = "AWS::Glue::Database.DatabaseIdentifier"
p_CatalogId: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "CatalogId"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-database-databaseidentifier.html#cfn-glue-database-databaseidentifier-catalogid"""
p_DatabaseName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "DatabaseName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-database-databaseidentifier.html#cfn-glue-database-databaseidentifier-databasename"""
@attr.s
class TableColumn(Property):
"""
AWS Object Type = "AWS::Glue::Table.Column"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-column.html
Property Document:
- ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-column.html#cfn-glue-table-column-name
- ``p_Comment``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-column.html#cfn-glue-table-column-comment
- ``p_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-column.html#cfn-glue-table-column-type
"""
AWS_OBJECT_TYPE = "AWS::Glue::Table.Column"
rp_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-column.html#cfn-glue-table-column-name"""
p_Comment: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Comment"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-column.html#cfn-glue-table-column-comment"""
p_Type: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Type"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-column.html#cfn-glue-table-column-type"""
@attr.s
class DataCatalogEncryptionSettingsEncryptionAtRest(Property):
"""
AWS Object Type = "AWS::Glue::DataCatalogEncryptionSettings.EncryptionAtRest"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-datacatalogencryptionsettings-encryptionatrest.html
Property Document:
- ``p_CatalogEncryptionMode``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-datacatalogencryptionsettings-encryptionatrest.html#cfn-glue-datacatalogencryptionsettings-encryptionatrest-catalogencryptionmode
- ``p_SseAwsKmsKeyId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-datacatalogencryptionsettings-encryptionatrest.html#cfn-glue-datacatalogencryptionsettings-encryptionatrest-sseawskmskeyid
"""
AWS_OBJECT_TYPE = "AWS::Glue::DataCatalogEncryptionSettings.EncryptionAtRest"
p_CatalogEncryptionMode: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "CatalogEncryptionMode"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-datacatalogencryptionsettings-encryptionatrest.html#cfn-glue-datacatalogencryptionsettings-encryptionatrest-catalogencryptionmode"""
p_SseAwsKmsKeyId: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "SseAwsKmsKeyId"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-datacatalogencryptionsettings-encryptionatrest.html#cfn-glue-datacatalogencryptionsettings-encryptionatrest-sseawskmskeyid"""
@attr.s
class CrawlerSchemaChangePolicy(Property):
"""
AWS Object Type = "AWS::Glue::Crawler.SchemaChangePolicy"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-schemachangepolicy.html
Property Document:
- ``p_DeleteBehavior``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-schemachangepolicy.html#cfn-glue-crawler-schemachangepolicy-deletebehavior
- ``p_UpdateBehavior``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-schemachangepolicy.html#cfn-glue-crawler-schemachangepolicy-updatebehavior
"""
AWS_OBJECT_TYPE = "AWS::Glue::Crawler.SchemaChangePolicy"
p_DeleteBehavior: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "DeleteBehavior"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-schemachangepolicy.html#cfn-glue-crawler-schemachangepolicy-deletebehavior"""
p_UpdateBehavior: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "UpdateBehavior"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-schemachangepolicy.html#cfn-glue-crawler-schemachangepolicy-updatebehavior"""
@attr.s
class MLTransformMLUserDataEncryption(Property):
"""
AWS Object Type = "AWS::Glue::MLTransform.MLUserDataEncryption"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-transformencryption-mluserdataencryption.html
Property Document:
- ``rp_MLUserDataEncryptionMode``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-transformencryption-mluserdataencryption.html#cfn-glue-mltransform-transformencryption-mluserdataencryption-mluserdataencryptionmode
- ``p_KmsKeyId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-transformencryption-mluserdataencryption.html#cfn-glue-mltransform-transformencryption-mluserdataencryption-kmskeyid
"""
AWS_OBJECT_TYPE = "AWS::Glue::MLTransform.MLUserDataEncryption"
rp_MLUserDataEncryptionMode: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "MLUserDataEncryptionMode"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-transformencryption-mluserdataencryption.html#cfn-glue-mltransform-transformencryption-mluserdataencryption-mluserdataencryptionmode"""
p_KmsKeyId: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "KmsKeyId"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-transformencryption-mluserdataencryption.html#cfn-glue-mltransform-transformencryption-mluserdataencryption-kmskeyid"""
@attr.s
class ClassifierCsvClassifier(Property):
"""
AWS Object Type = "AWS::Glue::Classifier.CsvClassifier"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-csvclassifier.html
Property Document:
- ``p_AllowSingleColumn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-csvclassifier.html#cfn-glue-classifier-csvclassifier-allowsinglecolumn
- ``p_ContainsHeader``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-csvclassifier.html#cfn-glue-classifier-csvclassifier-containsheader
- ``p_Delimiter``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-csvclassifier.html#cfn-glue-classifier-csvclassifier-delimiter
- ``p_DisableValueTrimming``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-csvclassifier.html#cfn-glue-classifier-csvclassifier-disablevaluetrimming
- ``p_Header``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-csvclassifier.html#cfn-glue-classifier-csvclassifier-header
- ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-csvclassifier.html#cfn-glue-classifier-csvclassifier-name
- ``p_QuoteSymbol``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-csvclassifier.html#cfn-glue-classifier-csvclassifier-quotesymbol
"""
AWS_OBJECT_TYPE = "AWS::Glue::Classifier.CsvClassifier"
p_AllowSingleColumn: bool = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(bool)),
metadata={AttrMeta.PROPERTY_NAME: "AllowSingleColumn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-csvclassifier.html#cfn-glue-classifier-csvclassifier-allowsinglecolumn"""
p_ContainsHeader: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "ContainsHeader"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-csvclassifier.html#cfn-glue-classifier-csvclassifier-containsheader"""
p_Delimiter: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Delimiter"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-csvclassifier.html#cfn-glue-classifier-csvclassifier-delimiter"""
p_DisableValueTrimming: bool = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(bool)),
metadata={AttrMeta.PROPERTY_NAME: "DisableValueTrimming"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-csvclassifier.html#cfn-glue-classifier-csvclassifier-disablevaluetrimming"""
p_Header: typing.List[TypeHint.intrinsic_str] = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "Header"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-csvclassifier.html#cfn-glue-classifier-csvclassifier-header"""
p_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-csvclassifier.html#cfn-glue-classifier-csvclassifier-name"""
p_QuoteSymbol: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "QuoteSymbol"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-csvclassifier.html#cfn-glue-classifier-csvclassifier-quotesymbol"""
@attr.s
class PartitionOrder(Property):
"""
AWS Object Type = "AWS::Glue::Partition.Order"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-order.html
Property Document:
- ``rp_Column``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-order.html#cfn-glue-partition-order-column
- ``p_SortOrder``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-order.html#cfn-glue-partition-order-sortorder
"""
AWS_OBJECT_TYPE = "AWS::Glue::Partition.Order"
rp_Column: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Column"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-order.html#cfn-glue-partition-order-column"""
p_SortOrder: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
metadata={AttrMeta.PROPERTY_NAME: "SortOrder"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-order.html#cfn-glue-partition-order-sortorder"""
@attr.s
class ClassifierGrokClassifier(Property):
"""
AWS Object Type = "AWS::Glue::Classifier.GrokClassifier"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-grokclassifier.html
Property Document:
- ``rp_Classification``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-grokclassifier.html#cfn-glue-classifier-grokclassifier-classification
- ``rp_GrokPattern``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-grokclassifier.html#cfn-glue-classifier-grokclassifier-grokpattern
- ``p_CustomPatterns``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-grokclassifier.html#cfn-glue-classifier-grokclassifier-custompatterns
- ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-grokclassifier.html#cfn-glue-classifier-grokclassifier-name
"""
AWS_OBJECT_TYPE = "AWS::Glue::Classifier.GrokClassifier"
rp_Classification: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Classification"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-grokclassifier.html#cfn-glue-classifier-grokclassifier-classification"""
rp_GrokPattern: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "GrokPattern"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-grokclassifier.html#cfn-glue-classifier-grokclassifier-grokpattern"""
p_CustomPatterns: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "CustomPatterns"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-grokclassifier.html#cfn-glue-classifier-grokclassifier-custompatterns"""
p_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-grokclassifier.html#cfn-glue-classifier-grokclassifier-name"""
@attr.s
class SchemaRegistry(Property):
"""
AWS Object Type = "AWS::Glue::Schema.Registry"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-schema-registry.html
Property Document:
- ``p_Arn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-schema-registry.html#cfn-glue-schema-registry-arn
- ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-schema-registry.html#cfn-glue-schema-registry-name
"""
AWS_OBJECT_TYPE = "AWS::Glue::Schema.Registry"
p_Arn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Arn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-schema-registry.html#cfn-glue-schema-registry-arn"""
p_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-schema-registry.html#cfn-glue-schema-registry-name"""
@attr.s
class SecurityConfigurationJobBookmarksEncryption(Property):
"""
AWS Object Type = "AWS::Glue::SecurityConfiguration.JobBookmarksEncryption"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-securityconfiguration-jobbookmarksencryption.html
Property Document:
- ``p_JobBookmarksEncryptionMode``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-securityconfiguration-jobbookmarksencryption.html#cfn-glue-securityconfiguration-jobbookmarksencryption-jobbookmarksencryptionmode
- ``p_KmsKeyArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-securityconfiguration-jobbookmarksencryption.html#cfn-glue-securityconfiguration-jobbookmarksencryption-kmskeyarn
"""
AWS_OBJECT_TYPE = "AWS::Glue::SecurityConfiguration.JobBookmarksEncryption"
p_JobBookmarksEncryptionMode: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "JobBookmarksEncryptionMode"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-securityconfiguration-jobbookmarksencryption.html#cfn-glue-securityconfiguration-jobbookmarksencryption-jobbookmarksencryptionmode"""
p_KmsKeyArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "KmsKeyArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-securityconfiguration-jobbookmarksencryption.html#cfn-glue-securityconfiguration-jobbookmarksencryption-kmskeyarn"""
@attr.s
class TableSerdeInfo(Property):
"""
AWS Object Type = "AWS::Glue::Table.SerdeInfo"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-serdeinfo.html
Property Document:
- ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-serdeinfo.html#cfn-glue-table-serdeinfo-name
- ``p_Parameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-serdeinfo.html#cfn-glue-table-serdeinfo-parameters
- ``p_SerializationLibrary``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-serdeinfo.html#cfn-glue-table-serdeinfo-serializationlibrary
"""
AWS_OBJECT_TYPE = "AWS::Glue::Table.SerdeInfo"
p_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-serdeinfo.html#cfn-glue-table-serdeinfo-name"""
p_Parameters: dict = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(dict)),
metadata={AttrMeta.PROPERTY_NAME: "Parameters"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-serdeinfo.html#cfn-glue-table-serdeinfo-parameters"""
p_SerializationLibrary: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "SerializationLibrary"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-serdeinfo.html#cfn-glue-table-serdeinfo-serializationlibrary"""
@attr.s
class ClassifierJsonClassifier(Property):
"""
AWS Object Type = "AWS::Glue::Classifier.JsonClassifier"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-jsonclassifier.html
Property Document:
- ``rp_JsonPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-jsonclassifier.html#cfn-glue-classifier-jsonclassifier-jsonpath
- ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-jsonclassifier.html#cfn-glue-classifier-jsonclassifier-name
"""
AWS_OBJECT_TYPE = "AWS::Glue::Classifier.JsonClassifier"
rp_JsonPath: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "JsonPath"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-jsonclassifier.html#cfn-glue-classifier-jsonclassifier-jsonpath"""
p_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-jsonclassifier.html#cfn-glue-classifier-jsonclassifier-name"""
@attr.s
class SecurityConfigurationS3Encryptions(Property):
"""
AWS Object Type = "AWS::Glue::SecurityConfiguration.S3Encryptions"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-securityconfiguration-s3encryptions.html
Property Document:
"""
AWS_OBJECT_TYPE = "AWS::Glue::SecurityConfiguration.S3Encryptions"
@attr.s
class DataCatalogEncryptionSettingsConnectionPasswordEncryption(Property):
"""
AWS Object Type = "AWS::Glue::DataCatalogEncryptionSettings.ConnectionPasswordEncryption"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-datacatalogencryptionsettings-connectionpasswordencryption.html
Property Document:
- ``p_KmsKeyId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-datacatalogencryptionsettings-connectionpasswordencryption.html#cfn-glue-datacatalogencryptionsettings-connectionpasswordencryption-kmskeyid
- ``p_ReturnConnectionPasswordEncrypted``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-datacatalogencryptionsettings-connectionpasswordencryption.html#cfn-glue-datacatalogencryptionsettings-connectionpasswordencryption-returnconnectionpasswordencrypted
"""
AWS_OBJECT_TYPE = "AWS::Glue::DataCatalogEncryptionSettings.ConnectionPasswordEncryption"
p_KmsKeyId: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "KmsKeyId"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-datacatalogencryptionsettings-connectionpasswordencryption.html#cfn-glue-datacatalogencryptionsettings-connectionpasswordencryption-kmskeyid"""
p_ReturnConnectionPasswordEncrypted: bool = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(bool)),
metadata={AttrMeta.PROPERTY_NAME: "ReturnConnectionPasswordEncrypted"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-datacatalogencryptionsettings-connectionpasswordencryption.html#cfn-glue-datacatalogencryptionsettings-connectionpasswordencryption-returnconnectionpasswordencrypted"""
@attr.s
class MLTransformGlueTables(Property):
"""
AWS Object Type = "AWS::Glue::MLTransform.GlueTables"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-inputrecordtables-gluetables.html
Property Document:
- ``rp_DatabaseName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-inputrecordtables-gluetables.html#cfn-glue-mltransform-inputrecordtables-gluetables-databasename
- ``rp_TableName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-inputrecordtables-gluetables.html#cfn-glue-mltransform-inputrecordtables-gluetables-tablename
- ``p_CatalogId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-inputrecordtables-gluetables.html#cfn-glue-mltransform-inputrecordtables-gluetables-catalogid
- ``p_ConnectionName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-inputrecordtables-gluetables.html#cfn-glue-mltransform-inputrecordtables-gluetables-connectionname
"""
AWS_OBJECT_TYPE = "AWS::Glue::MLTransform.GlueTables"
rp_DatabaseName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "DatabaseName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-inputrecordtables-gluetables.html#cfn-glue-mltransform-inputrecordtables-gluetables-databasename"""
rp_TableName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "TableName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-inputrecordtables-gluetables.html#cfn-glue-mltransform-inputrecordtables-gluetables-tablename"""
p_CatalogId: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "CatalogId"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-inputrecordtables-gluetables.html#cfn-glue-mltransform-inputrecordtables-gluetables-catalogid"""
p_ConnectionName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "ConnectionName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-inputrecordtables-gluetables.html#cfn-glue-mltransform-inputrecordtables-gluetables-connectionname"""
@attr.s
class PartitionSerdeInfo(Property):
"""
AWS Object Type = "AWS::Glue::Partition.SerdeInfo"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-serdeinfo.html
Property Document:
- ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-serdeinfo.html#cfn-glue-partition-serdeinfo-name
- ``p_Parameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-serdeinfo.html#cfn-glue-partition-serdeinfo-parameters
- ``p_SerializationLibrary``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-serdeinfo.html#cfn-glue-partition-serdeinfo-serializationlibrary
"""
AWS_OBJECT_TYPE = "AWS::Glue::Partition.SerdeInfo"
p_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-serdeinfo.html#cfn-glue-partition-serdeinfo-name"""
p_Parameters: dict = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(dict)),
metadata={AttrMeta.PROPERTY_NAME: "Parameters"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-serdeinfo.html#cfn-glue-partition-serdeinfo-parameters"""
p_SerializationLibrary: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "SerializationLibrary"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-serdeinfo.html#cfn-glue-partition-serdeinfo-serializationlibrary"""
@attr.s
class SecurityConfigurationS3Encryption(Property):
"""
AWS Object Type = "AWS::Glue::SecurityConfiguration.S3Encryption"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-securityconfiguration-s3encryption.html
Property Document:
- ``p_KmsKeyArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-securityconfiguration-s3encryption.html#cfn-glue-securityconfiguration-s3encryption-kmskeyarn
- ``p_S3EncryptionMode``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-securityconfiguration-s3encryption.html#cfn-glue-securityconfiguration-s3encryption-s3encryptionmode
"""
AWS_OBJECT_TYPE = "AWS::Glue::SecurityConfiguration.S3Encryption"
p_KmsKeyArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "KmsKeyArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-securityconfiguration-s3encryption.html#cfn-glue-securityconfiguration-s3encryption-kmskeyarn"""
p_S3EncryptionMode: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "S3EncryptionMode"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-securityconfiguration-s3encryption.html#cfn-glue-securityconfiguration-s3encryption-s3encryptionmode"""
@attr.s
class MLTransformTransformEncryption(Property):
"""
AWS Object Type = "AWS::Glue::MLTransform.TransformEncryption"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-transformencryption.html
Property Document:
- ``p_MLUserDataEncryption``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-transformencryption.html#cfn-glue-mltransform-transformencryption-mluserdataencryption
- ``p_TaskRunSecurityConfigurationName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-transformencryption.html#cfn-glue-mltransform-transformencryption-taskrunsecurityconfigurationname
"""
AWS_OBJECT_TYPE = "AWS::Glue::MLTransform.TransformEncryption"
p_MLUserDataEncryption: typing.Union['MLTransformMLUserDataEncryption', dict] = attr.ib(
default=None,
converter=MLTransformMLUserDataEncryption.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(MLTransformMLUserDataEncryption)),
metadata={AttrMeta.PROPERTY_NAME: "MLUserDataEncryption"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-transformencryption.html#cfn-glue-mltransform-transformencryption-mluserdataencryption"""
p_TaskRunSecurityConfigurationName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "TaskRunSecurityConfigurationName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-transformencryption.html#cfn-glue-mltransform-transformencryption-taskrunsecurityconfigurationname"""
@attr.s
class JobConnectionsList(Property):
"""
AWS Object Type = "AWS::Glue::Job.ConnectionsList"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-job-connectionslist.html
Property Document:
- ``p_Connections``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-job-connectionslist.html#cfn-glue-job-connectionslist-connections
"""
AWS_OBJECT_TYPE = "AWS::Glue::Job.ConnectionsList"
p_Connections: typing.List[TypeHint.intrinsic_str] = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "Connections"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-job-connectionslist.html#cfn-glue-job-connectionslist-connections"""
@attr.s
class SecurityConfigurationCloudWatchEncryption(Property):
"""
AWS Object Type = "AWS::Glue::SecurityConfiguration.CloudWatchEncryption"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-securityconfiguration-cloudwatchencryption.html
Property Document:
- ``p_CloudWatchEncryptionMode``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-securityconfiguration-cloudwatchencryption.html#cfn-glue-securityconfiguration-cloudwatchencryption-cloudwatchencryptionmode
- ``p_KmsKeyArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-securityconfiguration-cloudwatchencryption.html#cfn-glue-securityconfiguration-cloudwatchencryption-kmskeyarn
"""
AWS_OBJECT_TYPE = "AWS::Glue::SecurityConfiguration.CloudWatchEncryption"
p_CloudWatchEncryptionMode: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "CloudWatchEncryptionMode"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-securityconfiguration-cloudwatchencryption.html#cfn-glue-securityconfiguration-cloudwatchencryption-cloudwatchencryptionmode"""
p_KmsKeyArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "KmsKeyArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-securityconfiguration-cloudwatchencryption.html#cfn-glue-securityconfiguration-cloudwatchencryption-kmskeyarn"""
@attr.s
class CrawlerJdbcTarget(Property):
"""
AWS Object Type = "AWS::Glue::Crawler.JdbcTarget"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-jdbctarget.html
Property Document:
- ``p_ConnectionName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-jdbctarget.html#cfn-glue-crawler-jdbctarget-connectionname
- ``p_Exclusions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-jdbctarget.html#cfn-glue-crawler-jdbctarget-exclusions
- ``p_Path``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-jdbctarget.html#cfn-glue-crawler-jdbctarget-path
"""
AWS_OBJECT_TYPE = "AWS::Glue::Crawler.JdbcTarget"
p_ConnectionName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "ConnectionName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-jdbctarget.html#cfn-glue-crawler-jdbctarget-connectionname"""
p_Exclusions: typing.List[TypeHint.intrinsic_str] = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "Exclusions"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-jdbctarget.html#cfn-glue-crawler-jdbctarget-exclusions"""
p_Path: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Path"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-jdbctarget.html#cfn-glue-crawler-jdbctarget-path"""
@attr.s
class TableSchemaId(Property):
"""
AWS Object Type = "AWS::Glue::Table.SchemaId"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-schemaid.html
Property Document:
- ``p_RegistryName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-schemaid.html#cfn-glue-table-schemaid-registryname
- ``p_SchemaArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-schemaid.html#cfn-glue-table-schemaid-schemaarn
- ``p_SchemaName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-schemaid.html#cfn-glue-table-schemaid-schemaname
"""
AWS_OBJECT_TYPE = "AWS::Glue::Table.SchemaId"
p_RegistryName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "RegistryName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-schemaid.html#cfn-glue-table-schemaid-registryname"""
p_SchemaArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "SchemaArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-schemaid.html#cfn-glue-table-schemaid-schemaarn"""
p_SchemaName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "SchemaName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-schemaid.html#cfn-glue-table-schemaid-schemaname"""
@attr.s
class TriggerPredicate(Property):
"""
AWS Object Type = "AWS::Glue::Trigger.Predicate"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-trigger-predicate.html
Property Document:
- ``p_Conditions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-trigger-predicate.html#cfn-glue-trigger-predicate-conditions
- ``p_Logical``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-trigger-predicate.html#cfn-glue-trigger-predicate-logical
"""
AWS_OBJECT_TYPE = "AWS::Glue::Trigger.Predicate"
p_Conditions: typing.List[typing.Union['TriggerCondition', dict]] = attr.ib(
default=None,
converter=TriggerCondition.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TriggerCondition), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "Conditions"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-trigger-predicate.html#cfn-glue-trigger-predicate-conditions"""
p_Logical: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Logical"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-trigger-predicate.html#cfn-glue-trigger-predicate-logical"""
@attr.s
class TableOrder(Property):
"""
AWS Object Type = "AWS::Glue::Table.Order"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-order.html
Property Document:
- ``rp_Column``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-order.html#cfn-glue-table-order-column
- ``rp_SortOrder``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-order.html#cfn-glue-table-order-sortorder
"""
AWS_OBJECT_TYPE = "AWS::Glue::Table.Order"
rp_Column: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Column"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-order.html#cfn-glue-table-order-column"""
rp_SortOrder: int = attr.ib(
default=None,
validator=attr.validators.instance_of(int),
metadata={AttrMeta.PROPERTY_NAME: "SortOrder"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-order.html#cfn-glue-table-order-sortorder"""
@attr.s
class PartitionColumn(Property):
"""
AWS Object Type = "AWS::Glue::Partition.Column"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-column.html
Property Document:
- ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-column.html#cfn-glue-partition-column-name
- ``p_Comment``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-column.html#cfn-glue-partition-column-comment
- ``p_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-column.html#cfn-glue-partition-column-type
"""
AWS_OBJECT_TYPE = "AWS::Glue::Partition.Column"
rp_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-column.html#cfn-glue-partition-column-name"""
p_Comment: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Comment"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-column.html#cfn-glue-partition-column-comment"""
p_Type: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Type"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-column.html#cfn-glue-partition-column-type"""
@attr.s
class CrawlerDynamoDBTarget(Property):
"""
AWS Object Type = "AWS::Glue::Crawler.DynamoDBTarget"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-dynamodbtarget.html
Property Document:
- ``p_Path``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-dynamodbtarget.html#cfn-glue-crawler-dynamodbtarget-path
"""
AWS_OBJECT_TYPE = "AWS::Glue::Crawler.DynamoDBTarget"
p_Path: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Path"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-dynamodbtarget.html#cfn-glue-crawler-dynamodbtarget-path"""
@attr.s
class TableSkewedInfo(Property):
"""
AWS Object Type = "AWS::Glue::Table.SkewedInfo"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-skewedinfo.html
Property Document:
- ``p_SkewedColumnNames``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-skewedinfo.html#cfn-glue-table-skewedinfo-skewedcolumnnames
- ``p_SkewedColumnValueLocationMaps``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-skewedinfo.html#cfn-glue-table-skewedinfo-skewedcolumnvaluelocationmaps
- ``p_SkewedColumnValues``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-skewedinfo.html#cfn-glue-table-skewedinfo-skewedcolumnvalues
"""
AWS_OBJECT_TYPE = "AWS::Glue::Table.SkewedInfo"
p_SkewedColumnNames: typing.List[TypeHint.intrinsic_str] = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "SkewedColumnNames"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-skewedinfo.html#cfn-glue-table-skewedinfo-skewedcolumnnames"""
p_SkewedColumnValueLocationMaps: dict = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(dict)),
metadata={AttrMeta.PROPERTY_NAME: "SkewedColumnValueLocationMaps"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-skewedinfo.html#cfn-glue-table-skewedinfo-skewedcolumnvaluelocationmaps"""
p_SkewedColumnValues: typing.List[TypeHint.intrinsic_str] = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "SkewedColumnValues"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-skewedinfo.html#cfn-glue-table-skewedinfo-skewedcolumnvalues"""
@attr.s
class TriggerNotificationProperty(Property):
"""
AWS Object Type = "AWS::Glue::Trigger.NotificationProperty"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-trigger-notificationproperty.html
Property Document:
- ``p_NotifyDelayAfter``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-trigger-notificationproperty.html#cfn-glue-trigger-notificationproperty-notifydelayafter
"""
AWS_OBJECT_TYPE = "AWS::Glue::Trigger.NotificationProperty"
p_NotifyDelayAfter: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
metadata={AttrMeta.PROPERTY_NAME: "NotifyDelayAfter"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-trigger-notificationproperty.html#cfn-glue-trigger-notificationproperty-notifydelayafter"""
@attr.s
class ClassifierXMLClassifier(Property):
"""
AWS Object Type = "AWS::Glue::Classifier.XMLClassifier"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-xmlclassifier.html
Property Document:
- ``rp_Classification``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-xmlclassifier.html#cfn-glue-classifier-xmlclassifier-classification
- ``rp_RowTag``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-xmlclassifier.html#cfn-glue-classifier-xmlclassifier-rowtag
- ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-xmlclassifier.html#cfn-glue-classifier-xmlclassifier-name
"""
AWS_OBJECT_TYPE = "AWS::Glue::Classifier.XMLClassifier"
rp_Classification: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Classification"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-xmlclassifier.html#cfn-glue-classifier-xmlclassifier-classification"""
rp_RowTag: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "RowTag"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-xmlclassifier.html#cfn-glue-classifier-xmlclassifier-rowtag"""
p_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-classifier-xmlclassifier.html#cfn-glue-classifier-xmlclassifier-name"""
@attr.s
class MLTransformTransformParameters(Property):
"""
AWS Object Type = "AWS::Glue::MLTransform.TransformParameters"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-transformparameters.html
Property Document:
- ``rp_TransformType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-transformparameters.html#cfn-glue-mltransform-transformparameters-transformtype
- ``p_FindMatchesParameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-transformparameters.html#cfn-glue-mltransform-transformparameters-findmatchesparameters
"""
AWS_OBJECT_TYPE = "AWS::Glue::MLTransform.TransformParameters"
rp_TransformType: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "TransformType"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-transformparameters.html#cfn-glue-mltransform-transformparameters-transformtype"""
p_FindMatchesParameters: typing.Union['MLTransformFindMatchesParameters', dict] = attr.ib(
default=None,
converter=MLTransformFindMatchesParameters.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(MLTransformFindMatchesParameters)),
metadata={AttrMeta.PROPERTY_NAME: "FindMatchesParameters"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-transformparameters.html#cfn-glue-mltransform-transformparameters-findmatchesparameters"""
@attr.s
class PartitionSkewedInfo(Property):
"""
AWS Object Type = "AWS::Glue::Partition.SkewedInfo"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-skewedinfo.html
Property Document:
- ``p_SkewedColumnNames``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-skewedinfo.html#cfn-glue-partition-skewedinfo-skewedcolumnnames
- ``p_SkewedColumnValueLocationMaps``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-skewedinfo.html#cfn-glue-partition-skewedinfo-skewedcolumnvaluelocationmaps
- ``p_SkewedColumnValues``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-skewedinfo.html#cfn-glue-partition-skewedinfo-skewedcolumnvalues
"""
AWS_OBJECT_TYPE = "AWS::Glue::Partition.SkewedInfo"
p_SkewedColumnNames: typing.List[TypeHint.intrinsic_str] = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "SkewedColumnNames"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-skewedinfo.html#cfn-glue-partition-skewedinfo-skewedcolumnnames"""
p_SkewedColumnValueLocationMaps: dict = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(dict)),
metadata={AttrMeta.PROPERTY_NAME: "SkewedColumnValueLocationMaps"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-skewedinfo.html#cfn-glue-partition-skewedinfo-skewedcolumnvaluelocationmaps"""
p_SkewedColumnValues: typing.List[TypeHint.intrinsic_str] = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "SkewedColumnValues"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-skewedinfo.html#cfn-glue-partition-skewedinfo-skewedcolumnvalues"""
@attr.s
class CrawlerS3Target(Property):
"""
AWS Object Type = "AWS::Glue::Crawler.S3Target"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-s3target.html
Property Document:
- ``p_ConnectionName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-s3target.html#cfn-glue-crawler-s3target-connectionname
- ``p_Exclusions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-s3target.html#cfn-glue-crawler-s3target-exclusions
- ``p_Path``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-s3target.html#cfn-glue-crawler-s3target-path
"""
AWS_OBJECT_TYPE = "AWS::Glue::Crawler.S3Target"
p_ConnectionName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "ConnectionName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-s3target.html#cfn-glue-crawler-s3target-connectionname"""
p_Exclusions: typing.List[TypeHint.intrinsic_str] = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "Exclusions"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-s3target.html#cfn-glue-crawler-s3target-exclusions"""
p_Path: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Path"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-s3target.html#cfn-glue-crawler-s3target-path"""
@attr.s
class PartitionSchemaId(Property):
"""
AWS Object Type = "AWS::Glue::Partition.SchemaId"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-schemaid.html
Property Document:
- ``p_RegistryName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-schemaid.html#cfn-glue-partition-schemaid-registryname
- ``p_SchemaArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-schemaid.html#cfn-glue-partition-schemaid-schemaarn
- ``p_SchemaName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-schemaid.html#cfn-glue-partition-schemaid-schemaname
"""
AWS_OBJECT_TYPE = "AWS::Glue::Partition.SchemaId"
p_RegistryName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "RegistryName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-schemaid.html#cfn-glue-partition-schemaid-registryname"""
p_SchemaArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "SchemaArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-schemaid.html#cfn-glue-partition-schemaid-schemaarn"""
p_SchemaName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "SchemaName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-schemaid.html#cfn-glue-partition-schemaid-schemaname"""
@attr.s
class JobNotificationProperty(Property):
"""
AWS Object Type = "AWS::Glue::Job.NotificationProperty"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-job-notificationproperty.html
Property Document:
- ``p_NotifyDelayAfter``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-job-notificationproperty.html#cfn-glue-job-notificationproperty-notifydelayafter
"""
AWS_OBJECT_TYPE = "AWS::Glue::Job.NotificationProperty"
p_NotifyDelayAfter: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
metadata={AttrMeta.PROPERTY_NAME: "NotifyDelayAfter"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-job-notificationproperty.html#cfn-glue-job-notificationproperty-notifydelayafter"""
@attr.s
class JobExecutionProperty(Property):
"""
AWS Object Type = "AWS::Glue::Job.ExecutionProperty"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-job-executionproperty.html
Property Document:
- ``p_MaxConcurrentRuns``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-job-executionproperty.html#cfn-glue-job-executionproperty-maxconcurrentruns
"""
AWS_OBJECT_TYPE = "AWS::Glue::Job.ExecutionProperty"
p_MaxConcurrentRuns: float = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(float)),
metadata={AttrMeta.PROPERTY_NAME: "MaxConcurrentRuns"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-job-executionproperty.html#cfn-glue-job-executionproperty-maxconcurrentruns"""
@attr.s
class TableSchemaReference(Property):
"""
AWS Object Type = "AWS::Glue::Table.SchemaReference"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-schemareference.html
Property Document:
- ``p_SchameVersionId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-schemareference.html#cfn-glue-table-schemareference-schameversionid
- ``p_SchemaId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-schemareference.html#cfn-glue-table-schemareference-schemaid
- ``p_SchemaVersionNumber``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-schemareference.html#cfn-glue-table-schemareference-schemaversionnumber
"""
AWS_OBJECT_TYPE = "AWS::Glue::Table.SchemaReference"
p_SchameVersionId: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "SchameVersionId"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-schemareference.html#cfn-glue-table-schemareference-schameversionid"""
p_SchemaId: typing.Union['TableSchemaId', dict] = attr.ib(
default=None,
converter=TableSchemaId.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(TableSchemaId)),
metadata={AttrMeta.PROPERTY_NAME: "SchemaId"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-schemareference.html#cfn-glue-table-schemareference-schemaid"""
p_SchemaVersionNumber: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
metadata={AttrMeta.PROPERTY_NAME: "SchemaVersionNumber"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-schemareference.html#cfn-glue-table-schemareference-schemaversionnumber"""
@attr.s
class TableTableIdentifier(Property):
"""
AWS Object Type = "AWS::Glue::Table.TableIdentifier"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-tableidentifier.html
Property Document:
- ``p_CatalogId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-tableidentifier.html#cfn-glue-table-tableidentifier-catalogid
- ``p_DatabaseName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-tableidentifier.html#cfn-glue-table-tableidentifier-databasename
- ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-tableidentifier.html#cfn-glue-table-tableidentifier-name
"""
AWS_OBJECT_TYPE = "AWS::Glue::Table.TableIdentifier"
p_CatalogId: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "CatalogId"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-tableidentifier.html#cfn-glue-table-tableidentifier-catalogid"""
p_DatabaseName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "DatabaseName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-tableidentifier.html#cfn-glue-table-tableidentifier-databasename"""
p_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-tableidentifier.html#cfn-glue-table-tableidentifier-name"""
@attr.s
class DatabaseDatabaseInput(Property):
"""
AWS Object Type = "AWS::Glue::Database.DatabaseInput"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-database-databaseinput.html
Property Document:
- ``p_Description``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-database-databaseinput.html#cfn-glue-database-databaseinput-description
- ``p_LocationUri``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-database-databaseinput.html#cfn-glue-database-databaseinput-locationuri
- ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-database-databaseinput.html#cfn-glue-database-databaseinput-name
- ``p_Parameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-database-databaseinput.html#cfn-glue-database-databaseinput-parameters
- ``p_TargetDatabase``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-database-databaseinput.html#cfn-glue-database-databaseinput-targetdatabase
"""
AWS_OBJECT_TYPE = "AWS::Glue::Database.DatabaseInput"
p_Description: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Description"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-database-databaseinput.html#cfn-glue-database-databaseinput-description"""
p_LocationUri: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "LocationUri"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-database-databaseinput.html#cfn-glue-database-databaseinput-locationuri"""
p_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-database-databaseinput.html#cfn-glue-database-databaseinput-name"""
p_Parameters: dict = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(dict)),
metadata={AttrMeta.PROPERTY_NAME: "Parameters"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-database-databaseinput.html#cfn-glue-database-databaseinput-parameters"""
p_TargetDatabase: typing.Union['DatabaseDatabaseIdentifier', dict] = attr.ib(
default=None,
converter=DatabaseDatabaseIdentifier.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(DatabaseDatabaseIdentifier)),
metadata={AttrMeta.PROPERTY_NAME: "TargetDatabase"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-database-databaseinput.html#cfn-glue-database-databaseinput-targetdatabase"""
@attr.s
class ConnectionConnectionInput(Property):
"""
AWS Object Type = "AWS::Glue::Connection.ConnectionInput"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-connection-connectioninput.html
Property Document:
- ``rp_ConnectionType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-connection-connectioninput.html#cfn-glue-connection-connectioninput-connectiontype
- ``p_ConnectionProperties``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-connection-connectioninput.html#cfn-glue-connection-connectioninput-connectionproperties
- ``p_Description``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-connection-connectioninput.html#cfn-glue-connection-connectioninput-description
- ``p_MatchCriteria``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-connection-connectioninput.html#cfn-glue-connection-connectioninput-matchcriteria
- ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-connection-connectioninput.html#cfn-glue-connection-connectioninput-name
- ``p_PhysicalConnectionRequirements``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-connection-connectioninput.html#cfn-glue-connection-connectioninput-physicalconnectionrequirements
"""
AWS_OBJECT_TYPE = "AWS::Glue::Connection.ConnectionInput"
rp_ConnectionType: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "ConnectionType"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-connection-connectioninput.html#cfn-glue-connection-connectioninput-connectiontype"""
p_ConnectionProperties: dict = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(dict)),
metadata={AttrMeta.PROPERTY_NAME: "ConnectionProperties"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-connection-connectioninput.html#cfn-glue-connection-connectioninput-connectionproperties"""
p_Description: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Description"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-connection-connectioninput.html#cfn-glue-connection-connectioninput-description"""
p_MatchCriteria: typing.List[TypeHint.intrinsic_str] = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "MatchCriteria"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-connection-connectioninput.html#cfn-glue-connection-connectioninput-matchcriteria"""
p_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-connection-connectioninput.html#cfn-glue-connection-connectioninput-name"""
p_PhysicalConnectionRequirements: typing.Union['ConnectionPhysicalConnectionRequirements', dict] = attr.ib(
default=None,
converter=ConnectionPhysicalConnectionRequirements.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(ConnectionPhysicalConnectionRequirements)),
metadata={AttrMeta.PROPERTY_NAME: "PhysicalConnectionRequirements"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-connection-connectioninput.html#cfn-glue-connection-connectioninput-physicalconnectionrequirements"""
@attr.s
class DataCatalogEncryptionSettingsDataCatalogEncryptionSettings(Property):
"""
AWS Object Type = "AWS::Glue::DataCatalogEncryptionSettings.DataCatalogEncryptionSettings"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-datacatalogencryptionsettings-datacatalogencryptionsettings.html
Property Document:
- ``p_ConnectionPasswordEncryption``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-datacatalogencryptionsettings-datacatalogencryptionsettings.html#cfn-glue-datacatalogencryptionsettings-datacatalogencryptionsettings-connectionpasswordencryption
- ``p_EncryptionAtRest``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-datacatalogencryptionsettings-datacatalogencryptionsettings.html#cfn-glue-datacatalogencryptionsettings-datacatalogencryptionsettings-encryptionatrest
"""
AWS_OBJECT_TYPE = "AWS::Glue::DataCatalogEncryptionSettings.DataCatalogEncryptionSettings"
p_ConnectionPasswordEncryption: typing.Union['DataCatalogEncryptionSettingsConnectionPasswordEncryption', dict] = attr.ib(
default=None,
converter=DataCatalogEncryptionSettingsConnectionPasswordEncryption.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(DataCatalogEncryptionSettingsConnectionPasswordEncryption)),
metadata={AttrMeta.PROPERTY_NAME: "ConnectionPasswordEncryption"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-datacatalogencryptionsettings-datacatalogencryptionsettings.html#cfn-glue-datacatalogencryptionsettings-datacatalogencryptionsettings-connectionpasswordencryption"""
p_EncryptionAtRest: typing.Union['DataCatalogEncryptionSettingsEncryptionAtRest', dict] = attr.ib(
default=None,
converter=DataCatalogEncryptionSettingsEncryptionAtRest.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(DataCatalogEncryptionSettingsEncryptionAtRest)),
metadata={AttrMeta.PROPERTY_NAME: "EncryptionAtRest"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-datacatalogencryptionsettings-datacatalogencryptionsettings.html#cfn-glue-datacatalogencryptionsettings-datacatalogencryptionsettings-encryptionatrest"""
@attr.s
class SecurityConfigurationEncryptionConfiguration(Property):
"""
AWS Object Type = "AWS::Glue::SecurityConfiguration.EncryptionConfiguration"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-securityconfiguration-encryptionconfiguration.html
Property Document:
- ``p_CloudWatchEncryption``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-securityconfiguration-encryptionconfiguration.html#cfn-glue-securityconfiguration-encryptionconfiguration-cloudwatchencryption
- ``p_JobBookmarksEncryption``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-securityconfiguration-encryptionconfiguration.html#cfn-glue-securityconfiguration-encryptionconfiguration-jobbookmarksencryption
- ``p_S3Encryptions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-securityconfiguration-encryptionconfiguration.html#cfn-glue-securityconfiguration-encryptionconfiguration-s3encryptions
"""
AWS_OBJECT_TYPE = "AWS::Glue::SecurityConfiguration.EncryptionConfiguration"
p_CloudWatchEncryption: typing.Union['SecurityConfigurationCloudWatchEncryption', dict] = attr.ib(
default=None,
converter=SecurityConfigurationCloudWatchEncryption.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(SecurityConfigurationCloudWatchEncryption)),
metadata={AttrMeta.PROPERTY_NAME: "CloudWatchEncryption"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-securityconfiguration-encryptionconfiguration.html#cfn-glue-securityconfiguration-encryptionconfiguration-cloudwatchencryption"""
p_JobBookmarksEncryption: typing.Union['SecurityConfigurationJobBookmarksEncryption', dict] = attr.ib(
default=None,
converter=SecurityConfigurationJobBookmarksEncryption.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(SecurityConfigurationJobBookmarksEncryption)),
metadata={AttrMeta.PROPERTY_NAME: "JobBookmarksEncryption"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-securityconfiguration-encryptionconfiguration.html#cfn-glue-securityconfiguration-encryptionconfiguration-jobbookmarksencryption"""
p_S3Encryptions: typing.Union['SecurityConfigurationS3Encryptions', dict] = attr.ib(
default=None,
converter=SecurityConfigurationS3Encryptions.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(SecurityConfigurationS3Encryptions)),
metadata={AttrMeta.PROPERTY_NAME: "S3Encryptions"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-securityconfiguration-encryptionconfiguration.html#cfn-glue-securityconfiguration-encryptionconfiguration-s3encryptions"""
@attr.s
class PartitionSchemaReference(Property):
"""
AWS Object Type = "AWS::Glue::Partition.SchemaReference"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-schemareference.html
Property Document:
- ``p_SchameVersionId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-schemareference.html#cfn-glue-partition-schemareference-schameversionid
- ``p_SchemaId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-schemareference.html#cfn-glue-partition-schemareference-schemaid
- ``p_SchemaVersionNumber``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-schemareference.html#cfn-glue-partition-schemareference-schemaversionnumber
"""
AWS_OBJECT_TYPE = "AWS::Glue::Partition.SchemaReference"
p_SchameVersionId: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "SchameVersionId"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-schemareference.html#cfn-glue-partition-schemareference-schameversionid"""
p_SchemaId: typing.Union['PartitionSchemaId', dict] = attr.ib(
default=None,
converter=PartitionSchemaId.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(PartitionSchemaId)),
metadata={AttrMeta.PROPERTY_NAME: "SchemaId"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-schemareference.html#cfn-glue-partition-schemareference-schemaid"""
p_SchemaVersionNumber: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
metadata={AttrMeta.PROPERTY_NAME: "SchemaVersionNumber"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-schemareference.html#cfn-glue-partition-schemareference-schemaversionnumber"""
@attr.s
class MLTransformInputRecordTables(Property):
"""
AWS Object Type = "AWS::Glue::MLTransform.InputRecordTables"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-inputrecordtables.html
Property Document:
- ``p_GlueTables``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-inputrecordtables.html#cfn-glue-mltransform-inputrecordtables-gluetables
"""
AWS_OBJECT_TYPE = "AWS::Glue::MLTransform.InputRecordTables"
p_GlueTables: typing.List[typing.Union['MLTransformGlueTables', dict]] = attr.ib(
default=None,
converter=MLTransformGlueTables.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(MLTransformGlueTables), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "GlueTables"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-mltransform-inputrecordtables.html#cfn-glue-mltransform-inputrecordtables-gluetables"""
@attr.s
class CrawlerTargets(Property):
"""
AWS Object Type = "AWS::Glue::Crawler.Targets"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-targets.html
Property Document:
- ``p_CatalogTargets``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-targets.html#cfn-glue-crawler-targets-catalogtargets
- ``p_DynamoDBTargets``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-targets.html#cfn-glue-crawler-targets-dynamodbtargets
- ``p_JdbcTargets``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-targets.html#cfn-glue-crawler-targets-jdbctargets
- ``p_S3Targets``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-targets.html#cfn-glue-crawler-targets-s3targets
"""
AWS_OBJECT_TYPE = "AWS::Glue::Crawler.Targets"
p_CatalogTargets: typing.List[typing.Union['CrawlerCatalogTarget', dict]] = attr.ib(
default=None,
converter=CrawlerCatalogTarget.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(CrawlerCatalogTarget), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "CatalogTargets"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-targets.html#cfn-glue-crawler-targets-catalogtargets"""
p_DynamoDBTargets: typing.List[typing.Union['CrawlerDynamoDBTarget', dict]] = attr.ib(
default=None,
converter=CrawlerDynamoDBTarget.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(CrawlerDynamoDBTarget), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "DynamoDBTargets"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-targets.html#cfn-glue-crawler-targets-dynamodbtargets"""
p_JdbcTargets: typing.List[typing.Union['CrawlerJdbcTarget', dict]] = attr.ib(
default=None,
converter=CrawlerJdbcTarget.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(CrawlerJdbcTarget), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "JdbcTargets"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-targets.html#cfn-glue-crawler-targets-jdbctargets"""
p_S3Targets: typing.List[typing.Union['CrawlerS3Target', dict]] = attr.ib(
default=None,
converter=CrawlerS3Target.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(CrawlerS3Target), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "S3Targets"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-targets.html#cfn-glue-crawler-targets-s3targets"""
@attr.s
class TriggerAction(Property):
"""
AWS Object Type = "AWS::Glue::Trigger.Action"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-trigger-action.html
Property Document:
- ``p_Arguments``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-trigger-action.html#cfn-glue-trigger-action-arguments
- ``p_CrawlerName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-trigger-action.html#cfn-glue-trigger-action-crawlername
- ``p_JobName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-trigger-action.html#cfn-glue-trigger-action-jobname
- ``p_NotificationProperty``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-trigger-action.html#cfn-glue-trigger-action-notificationproperty
- ``p_SecurityConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-trigger-action.html#cfn-glue-trigger-action-securityconfiguration
- ``p_Timeout``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-trigger-action.html#cfn-glue-trigger-action-timeout
"""
AWS_OBJECT_TYPE = "AWS::Glue::Trigger.Action"
p_Arguments: dict = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(dict)),
metadata={AttrMeta.PROPERTY_NAME: "Arguments"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-trigger-action.html#cfn-glue-trigger-action-arguments"""
p_CrawlerName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "CrawlerName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-trigger-action.html#cfn-glue-trigger-action-crawlername"""
p_JobName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "JobName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-trigger-action.html#cfn-glue-trigger-action-jobname"""
p_NotificationProperty: typing.Union['TriggerNotificationProperty', dict] = attr.ib(
default=None,
converter=TriggerNotificationProperty.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(TriggerNotificationProperty)),
metadata={AttrMeta.PROPERTY_NAME: "NotificationProperty"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-trigger-action.html#cfn-glue-trigger-action-notificationproperty"""
p_SecurityConfiguration: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "SecurityConfiguration"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-trigger-action.html#cfn-glue-trigger-action-securityconfiguration"""
p_Timeout: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
metadata={AttrMeta.PROPERTY_NAME: "Timeout"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-trigger-action.html#cfn-glue-trigger-action-timeout"""
@attr.s
class PartitionStorageDescriptor(Property):
"""
AWS Object Type = "AWS::Glue::Partition.StorageDescriptor"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-storagedescriptor.html
Property Document:
- ``p_BucketColumns``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-storagedescriptor.html#cfn-glue-partition-storagedescriptor-bucketcolumns
- ``p_Columns``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-storagedescriptor.html#cfn-glue-partition-storagedescriptor-columns
- ``p_Compressed``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-storagedescriptor.html#cfn-glue-partition-storagedescriptor-compressed
- ``p_InputFormat``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-storagedescriptor.html#cfn-glue-partition-storagedescriptor-inputformat
- ``p_Location``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-storagedescriptor.html#cfn-glue-partition-storagedescriptor-location
- ``p_NumberOfBuckets``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-storagedescriptor.html#cfn-glue-partition-storagedescriptor-numberofbuckets
- ``p_OutputFormat``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-storagedescriptor.html#cfn-glue-partition-storagedescriptor-outputformat
- ``p_Parameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-storagedescriptor.html#cfn-glue-partition-storagedescriptor-parameters
- ``p_SchemaReference``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-storagedescriptor.html#cfn-glue-partition-storagedescriptor-schemareference
- ``p_SerdeInfo``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-storagedescriptor.html#cfn-glue-partition-storagedescriptor-serdeinfo
- ``p_SkewedInfo``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-storagedescriptor.html#cfn-glue-partition-storagedescriptor-skewedinfo
- ``p_SortColumns``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-storagedescriptor.html#cfn-glue-partition-storagedescriptor-sortcolumns
- ``p_StoredAsSubDirectories``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-storagedescriptor.html#cfn-glue-partition-storagedescriptor-storedassubdirectories
"""
AWS_OBJECT_TYPE = "AWS::Glue::Partition.StorageDescriptor"
p_BucketColumns: typing.List[TypeHint.intrinsic_str] = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "BucketColumns"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-storagedescriptor.html#cfn-glue-partition-storagedescriptor-bucketcolumns"""
p_Columns: typing.List[typing.Union['PartitionColumn', dict]] = attr.ib(
default=None,
converter=PartitionColumn.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PartitionColumn), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "Columns"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-storagedescriptor.html#cfn-glue-partition-storagedescriptor-columns"""
p_Compressed: bool = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(bool)),
metadata={AttrMeta.PROPERTY_NAME: "Compressed"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-storagedescriptor.html#cfn-glue-partition-storagedescriptor-compressed"""
p_InputFormat: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "InputFormat"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-storagedescriptor.html#cfn-glue-partition-storagedescriptor-inputformat"""
p_Location: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Location"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-storagedescriptor.html#cfn-glue-partition-storagedescriptor-location"""
p_NumberOfBuckets: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
metadata={AttrMeta.PROPERTY_NAME: "NumberOfBuckets"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-storagedescriptor.html#cfn-glue-partition-storagedescriptor-numberofbuckets"""
p_OutputFormat: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "OutputFormat"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-storagedescriptor.html#cfn-glue-partition-storagedescriptor-outputformat"""
p_Parameters: dict = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(dict)),
metadata={AttrMeta.PROPERTY_NAME: "Parameters"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-storagedescriptor.html#cfn-glue-partition-storagedescriptor-parameters"""
p_SchemaReference: typing.Union['PartitionSchemaReference', dict] = attr.ib(
default=None,
converter=PartitionSchemaReference.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(PartitionSchemaReference)),
metadata={AttrMeta.PROPERTY_NAME: "SchemaReference"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-storagedescriptor.html#cfn-glue-partition-storagedescriptor-schemareference"""
p_SerdeInfo: typing.Union['PartitionSerdeInfo', dict] = attr.ib(
default=None,
converter=PartitionSerdeInfo.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(PartitionSerdeInfo)),
metadata={AttrMeta.PROPERTY_NAME: "SerdeInfo"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-storagedescriptor.html#cfn-glue-partition-storagedescriptor-serdeinfo"""
p_SkewedInfo: typing.Union['PartitionSkewedInfo', dict] = attr.ib(
default=None,
converter=PartitionSkewedInfo.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(PartitionSkewedInfo)),
metadata={AttrMeta.PROPERTY_NAME: "SkewedInfo"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-storagedescriptor.html#cfn-glue-partition-storagedescriptor-skewedinfo"""
p_SortColumns: typing.List[typing.Union['PartitionOrder', dict]] = attr.ib(
default=None,
converter=PartitionOrder.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PartitionOrder), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "SortColumns"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-storagedescriptor.html#cfn-glue-partition-storagedescriptor-sortcolumns"""
p_StoredAsSubDirectories: bool = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(bool)),
metadata={AttrMeta.PROPERTY_NAME: "StoredAsSubDirectories"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-storagedescriptor.html#cfn-glue-partition-storagedescriptor-storedassubdirectories"""
@attr.s
class TableStorageDescriptor(Property):
"""
AWS Object Type = "AWS::Glue::Table.StorageDescriptor"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-storagedescriptor.html
Property Document:
- ``p_BucketColumns``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-storagedescriptor.html#cfn-glue-table-storagedescriptor-bucketcolumns
- ``p_Columns``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-storagedescriptor.html#cfn-glue-table-storagedescriptor-columns
- ``p_Compressed``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-storagedescriptor.html#cfn-glue-table-storagedescriptor-compressed
- ``p_InputFormat``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-storagedescriptor.html#cfn-glue-table-storagedescriptor-inputformat
- ``p_Location``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-storagedescriptor.html#cfn-glue-table-storagedescriptor-location
- ``p_NumberOfBuckets``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-storagedescriptor.html#cfn-glue-table-storagedescriptor-numberofbuckets
- ``p_OutputFormat``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-storagedescriptor.html#cfn-glue-table-storagedescriptor-outputformat
- ``p_Parameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-storagedescriptor.html#cfn-glue-table-storagedescriptor-parameters
- ``p_SchemaReference``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-storagedescriptor.html#cfn-glue-table-storagedescriptor-schemareference
- ``p_SerdeInfo``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-storagedescriptor.html#cfn-glue-table-storagedescriptor-serdeinfo
- ``p_SkewedInfo``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-storagedescriptor.html#cfn-glue-table-storagedescriptor-skewedinfo
- ``p_SortColumns``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-storagedescriptor.html#cfn-glue-table-storagedescriptor-sortcolumns
- ``p_StoredAsSubDirectories``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-storagedescriptor.html#cfn-glue-table-storagedescriptor-storedassubdirectories
"""
AWS_OBJECT_TYPE = "AWS::Glue::Table.StorageDescriptor"
p_BucketColumns: typing.List[TypeHint.intrinsic_str] = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "BucketColumns"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-storagedescriptor.html#cfn-glue-table-storagedescriptor-bucketcolumns"""
p_Columns: typing.List[typing.Union['TableColumn', dict]] = attr.ib(
default=None,
converter=TableColumn.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TableColumn), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "Columns"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-storagedescriptor.html#cfn-glue-table-storagedescriptor-columns"""
p_Compressed: bool = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(bool)),
metadata={AttrMeta.PROPERTY_NAME: "Compressed"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-storagedescriptor.html#cfn-glue-table-storagedescriptor-compressed"""
p_InputFormat: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "InputFormat"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-storagedescriptor.html#cfn-glue-table-storagedescriptor-inputformat"""
p_Location: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Location"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-storagedescriptor.html#cfn-glue-table-storagedescriptor-location"""
p_NumberOfBuckets: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
metadata={AttrMeta.PROPERTY_NAME: "NumberOfBuckets"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-storagedescriptor.html#cfn-glue-table-storagedescriptor-numberofbuckets"""
p_OutputFormat: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "OutputFormat"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-storagedescriptor.html#cfn-glue-table-storagedescriptor-outputformat"""
p_Parameters: dict = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(dict)),
metadata={AttrMeta.PROPERTY_NAME: "Parameters"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-storagedescriptor.html#cfn-glue-table-storagedescriptor-parameters"""
p_SchemaReference: typing.Union['TableSchemaReference', dict] = attr.ib(
default=None,
converter=TableSchemaReference.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(TableSchemaReference)),
metadata={AttrMeta.PROPERTY_NAME: "SchemaReference"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-storagedescriptor.html#cfn-glue-table-storagedescriptor-schemareference"""
p_SerdeInfo: typing.Union['TableSerdeInfo', dict] = attr.ib(
default=None,
converter=TableSerdeInfo.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(TableSerdeInfo)),
metadata={AttrMeta.PROPERTY_NAME: "SerdeInfo"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-storagedescriptor.html#cfn-glue-table-storagedescriptor-serdeinfo"""
p_SkewedInfo: typing.Union['TableSkewedInfo', dict] = attr.ib(
default=None,
converter=TableSkewedInfo.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(TableSkewedInfo)),
metadata={AttrMeta.PROPERTY_NAME: "SkewedInfo"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-storagedescriptor.html#cfn-glue-table-storagedescriptor-skewedinfo"""
p_SortColumns: typing.List[typing.Union['TableOrder', dict]] = attr.ib(
default=None,
converter=TableOrder.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TableOrder), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "SortColumns"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-storagedescriptor.html#cfn-glue-table-storagedescriptor-sortcolumns"""
p_StoredAsSubDirectories: bool = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(bool)),
metadata={AttrMeta.PROPERTY_NAME: "StoredAsSubDirectories"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-storagedescriptor.html#cfn-glue-table-storagedescriptor-storedassubdirectories"""
@attr.s
class PartitionPartitionInput(Property):
"""
AWS Object Type = "AWS::Glue::Partition.PartitionInput"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-partitioninput.html
Property Document:
- ``rp_Values``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-partitioninput.html#cfn-glue-partition-partitioninput-values
- ``p_Parameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-partitioninput.html#cfn-glue-partition-partitioninput-parameters
- ``p_StorageDescriptor``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-partitioninput.html#cfn-glue-partition-partitioninput-storagedescriptor
"""
AWS_OBJECT_TYPE = "AWS::Glue::Partition.PartitionInput"
rp_Values: typing.List[TypeHint.intrinsic_str] = attr.ib(
default=None,
validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list)),
metadata={AttrMeta.PROPERTY_NAME: "Values"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-partitioninput.html#cfn-glue-partition-partitioninput-values"""
p_Parameters: dict = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(dict)),
metadata={AttrMeta.PROPERTY_NAME: "Parameters"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-partitioninput.html#cfn-glue-partition-partitioninput-parameters"""
p_StorageDescriptor: typing.Union['PartitionStorageDescriptor', dict] = attr.ib(
default=None,
converter=PartitionStorageDescriptor.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(PartitionStorageDescriptor)),
metadata={AttrMeta.PROPERTY_NAME: "StorageDescriptor"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-partition-partitioninput.html#cfn-glue-partition-partitioninput-storagedescriptor"""
@attr.s
class TableTableInput(Property):
"""
AWS Object Type = "AWS::Glue::Table.TableInput"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-tableinput.html
Property Document:
- ``p_Description``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-tableinput.html#cfn-glue-table-tableinput-description
- ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-tableinput.html#cfn-glue-table-tableinput-name
- ``p_Owner``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-tableinput.html#cfn-glue-table-tableinput-owner
- ``p_Parameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-tableinput.html#cfn-glue-table-tableinput-parameters
- ``p_PartitionKeys``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-tableinput.html#cfn-glue-table-tableinput-partitionkeys
- ``p_Retention``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-tableinput.html#cfn-glue-table-tableinput-retention
- ``p_StorageDescriptor``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-tableinput.html#cfn-glue-table-tableinput-storagedescriptor
- ``p_TableType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-tableinput.html#cfn-glue-table-tableinput-tabletype
- ``p_TargetTable``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-tableinput.html#cfn-glue-table-tableinput-targettable
- ``p_ViewExpandedText``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-tableinput.html#cfn-glue-table-tableinput-viewexpandedtext
- ``p_ViewOriginalText``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-tableinput.html#cfn-glue-table-tableinput-vieworiginaltext
"""
AWS_OBJECT_TYPE = "AWS::Glue::Table.TableInput"
p_Description: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Description"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-tableinput.html#cfn-glue-table-tableinput-description"""
p_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-tableinput.html#cfn-glue-table-tableinput-name"""
p_Owner: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Owner"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-tableinput.html#cfn-glue-table-tableinput-owner"""
p_Parameters: dict = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(dict)),
metadata={AttrMeta.PROPERTY_NAME: "Parameters"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-tableinput.html#cfn-glue-table-tableinput-parameters"""
p_PartitionKeys: typing.List[typing.Union['TableColumn', dict]] = attr.ib(
default=None,
converter=TableColumn.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TableColumn), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "PartitionKeys"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-tableinput.html#cfn-glue-table-tableinput-partitionkeys"""
p_Retention: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
metadata={AttrMeta.PROPERTY_NAME: "Retention"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-tableinput.html#cfn-glue-table-tableinput-retention"""
p_StorageDescriptor: typing.Union['TableStorageDescriptor', dict] = attr.ib(
default=None,
converter=TableStorageDescriptor.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(TableStorageDescriptor)),
metadata={AttrMeta.PROPERTY_NAME: "StorageDescriptor"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-tableinput.html#cfn-glue-table-tableinput-storagedescriptor"""
p_TableType: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "TableType"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-tableinput.html#cfn-glue-table-tableinput-tabletype"""
p_TargetTable: typing.Union['TableTableIdentifier', dict] = attr.ib(
default=None,
converter=TableTableIdentifier.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(TableTableIdentifier)),
metadata={AttrMeta.PROPERTY_NAME: "TargetTable"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-tableinput.html#cfn-glue-table-tableinput-targettable"""
p_ViewExpandedText: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "ViewExpandedText"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-tableinput.html#cfn-glue-table-tableinput-viewexpandedtext"""
p_ViewOriginalText: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "ViewOriginalText"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-table-tableinput.html#cfn-glue-table-tableinput-vieworiginaltext"""
# --- Resource declaration ---
@attr.s
class SchemaVersion(Resource):
"""
AWS Object Type = "AWS::Glue::SchemaVersion"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-schemaversion.html
Property Document:
- ``rp_Schema``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-schemaversion.html#cfn-glue-schemaversion-schema
- ``rp_SchemaDefinition``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-schemaversion.html#cfn-glue-schemaversion-schemadefinition
"""
AWS_OBJECT_TYPE = "AWS::Glue::SchemaVersion"
rp_Schema: typing.Union['SchemaVersionSchema', dict] = attr.ib(
default=None,
converter=SchemaVersionSchema.from_dict,
validator=attr.validators.instance_of(SchemaVersionSchema),
metadata={AttrMeta.PROPERTY_NAME: "Schema"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-schemaversion.html#cfn-glue-schemaversion-schema"""
rp_SchemaDefinition: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "SchemaDefinition"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-schemaversion.html#cfn-glue-schemaversion-schemadefinition"""
@property
def rv_VersionId(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-schemaversion.html#aws-resource-glue-schemaversion-return-values"""
return GetAtt(resource=self, attr_name="VersionId")
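# Illustrative usage sketch, not part of the generated declarations: a minimal
# SchemaVersion. It assumes the ``Resource`` base class takes a logical id as
# its first positional argument, and the dict keys routed through
# ``SchemaVersionSchema.from_dict`` (``p_RegistryName``, ``p_SchemaName``) are
# inferred from this module's naming convention, not verified.
def _example_schema_version():
    schema_version = SchemaVersion(
        "MySchemaVersion",
        rp_Schema=dict(p_RegistryName="my-registry", p_SchemaName="my-schema"),
        rp_SchemaDefinition='{"type": "record", "name": "r", "fields": []}',
    )
    # rv_VersionId resolves to a Fn::GetAtt reference at deploy time.
    return schema_version.rv_VersionId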
@attr.s
class DataCatalogEncryptionSettings(Resource):
"""
AWS Object Type = "AWS::Glue::DataCatalogEncryptionSettings"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-datacatalogencryptionsettings.html
Property Document:
- ``rp_CatalogId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-datacatalogencryptionsettings.html#cfn-glue-datacatalogencryptionsettings-catalogid
- ``rp_DataCatalogEncryptionSettings``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-datacatalogencryptionsettings.html#cfn-glue-datacatalogencryptionsettings-datacatalogencryptionsettings
"""
AWS_OBJECT_TYPE = "AWS::Glue::DataCatalogEncryptionSettings"
rp_CatalogId: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "CatalogId"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-datacatalogencryptionsettings.html#cfn-glue-datacatalogencryptionsettings-catalogid"""
rp_DataCatalogEncryptionSettings: typing.Union['DataCatalogEncryptionSettingsDataCatalogEncryptionSettings', dict] = attr.ib(
default=None,
converter=DataCatalogEncryptionSettingsDataCatalogEncryptionSettings.from_dict,
validator=attr.validators.instance_of(DataCatalogEncryptionSettingsDataCatalogEncryptionSettings),
metadata={AttrMeta.PROPERTY_NAME: "DataCatalogEncryptionSettings"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-datacatalogencryptionsettings.html#cfn-glue-datacatalogencryptionsettings-datacatalogencryptionsettings"""
@attr.s
class Workflow(Resource):
"""
AWS Object Type = "AWS::Glue::Workflow"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-workflow.html
Property Document:
- ``p_DefaultRunProperties``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-workflow.html#cfn-glue-workflow-defaultrunproperties
- ``p_Description``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-workflow.html#cfn-glue-workflow-description
- ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-workflow.html#cfn-glue-workflow-name
- ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-workflow.html#cfn-glue-workflow-tags
"""
AWS_OBJECT_TYPE = "AWS::Glue::Workflow"
p_DefaultRunProperties: dict = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(dict)),
metadata={AttrMeta.PROPERTY_NAME: "DefaultRunProperties"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-workflow.html#cfn-glue-workflow-defaultrunproperties"""
p_Description: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Description"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-workflow.html#cfn-glue-workflow-description"""
p_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-workflow.html#cfn-glue-workflow-name"""
p_Tags: dict = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(dict)),
metadata={AttrMeta.PROPERTY_NAME: "Tags"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-workflow.html#cfn-glue-workflow-tags"""
@attr.s
class Job(Resource):
"""
AWS Object Type = "AWS::Glue::Job"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html
Property Document:
- ``rp_Command``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-command
- ``rp_Role``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-role
- ``p_AllocatedCapacity``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-allocatedcapacity
- ``p_Connections``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-connections
- ``p_DefaultArguments``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-defaultarguments
- ``p_Description``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-description
- ``p_ExecutionProperty``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-executionproperty
- ``p_GlueVersion``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-glueversion
- ``p_LogUri``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-loguri
- ``p_MaxCapacity``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-maxcapacity
- ``p_MaxRetries``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-maxretries
- ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-name
- ``p_NotificationProperty``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-notificationproperty
- ``p_NumberOfWorkers``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-numberofworkers
- ``p_SecurityConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-securityconfiguration
- ``p_Timeout``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-timeout
- ``p_WorkerType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-workertype
- ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-tags
"""
AWS_OBJECT_TYPE = "AWS::Glue::Job"
rp_Command: typing.Union['JobJobCommand', dict] = attr.ib(
default=None,
converter=JobJobCommand.from_dict,
validator=attr.validators.instance_of(JobJobCommand),
metadata={AttrMeta.PROPERTY_NAME: "Command"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-command"""
rp_Role: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Role"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-role"""
p_AllocatedCapacity: float = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(float)),
metadata={AttrMeta.PROPERTY_NAME: "AllocatedCapacity"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-allocatedcapacity"""
p_Connections: typing.Union['JobConnectionsList', dict] = attr.ib(
default=None,
converter=JobConnectionsList.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(JobConnectionsList)),
metadata={AttrMeta.PROPERTY_NAME: "Connections"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-connections"""
p_DefaultArguments: dict = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(dict)),
metadata={AttrMeta.PROPERTY_NAME: "DefaultArguments"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-defaultarguments"""
p_Description: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Description"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-description"""
p_ExecutionProperty: typing.Union['JobExecutionProperty', dict] = attr.ib(
default=None,
converter=JobExecutionProperty.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(JobExecutionProperty)),
metadata={AttrMeta.PROPERTY_NAME: "ExecutionProperty"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-executionproperty"""
p_GlueVersion: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "GlueVersion"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-glueversion"""
p_LogUri: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "LogUri"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-loguri"""
p_MaxCapacity: float = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(float)),
metadata={AttrMeta.PROPERTY_NAME: "MaxCapacity"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-maxcapacity"""
p_MaxRetries: float = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(float)),
metadata={AttrMeta.PROPERTY_NAME: "MaxRetries"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-maxretries"""
p_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-name"""
p_NotificationProperty: typing.Union['JobNotificationProperty', dict] = attr.ib(
default=None,
converter=JobNotificationProperty.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(JobNotificationProperty)),
metadata={AttrMeta.PROPERTY_NAME: "NotificationProperty"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-notificationproperty"""
p_NumberOfWorkers: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
metadata={AttrMeta.PROPERTY_NAME: "NumberOfWorkers"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-numberofworkers"""
p_SecurityConfiguration: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "SecurityConfiguration"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-securityconfiguration"""
p_Timeout: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
metadata={AttrMeta.PROPERTY_NAME: "Timeout"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-timeout"""
p_WorkerType: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "WorkerType"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-workertype"""
p_Tags: dict = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(dict)),
metadata={AttrMeta.PROPERTY_NAME: "Tags"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-job.html#cfn-glue-job-tags"""
@attr.s
class Database(Resource):
"""
AWS Object Type = "AWS::Glue::Database"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-database.html
Property Document:
- ``rp_CatalogId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-database.html#cfn-glue-database-catalogid
- ``rp_DatabaseInput``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-database.html#cfn-glue-database-databaseinput
"""
AWS_OBJECT_TYPE = "AWS::Glue::Database"
rp_CatalogId: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "CatalogId"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-database.html#cfn-glue-database-catalogid"""
rp_DatabaseInput: typing.Union['DatabaseDatabaseInput', dict] = attr.ib(
default=None,
converter=DatabaseDatabaseInput.from_dict,
validator=attr.validators.instance_of(DatabaseDatabaseInput),
metadata={AttrMeta.PROPERTY_NAME: "DatabaseInput"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-database.html#cfn-glue-database-databaseinput"""
@attr.s
class DevEndpoint(Resource):
"""
AWS Object Type = "AWS::Glue::DevEndpoint"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-devendpoint.html
Property Document:
- ``rp_RoleArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-devendpoint.html#cfn-glue-devendpoint-rolearn
- ``p_Arguments``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-devendpoint.html#cfn-glue-devendpoint-arguments
- ``p_EndpointName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-devendpoint.html#cfn-glue-devendpoint-endpointname
- ``p_ExtraJarsS3Path``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-devendpoint.html#cfn-glue-devendpoint-extrajarss3path
- ``p_ExtraPythonLibsS3Path``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-devendpoint.html#cfn-glue-devendpoint-extrapythonlibss3path
- ``p_GlueVersion``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-devendpoint.html#cfn-glue-devendpoint-glueversion
- ``p_NumberOfNodes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-devendpoint.html#cfn-glue-devendpoint-numberofnodes
- ``p_NumberOfWorkers``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-devendpoint.html#cfn-glue-devendpoint-numberofworkers
- ``p_PublicKey``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-devendpoint.html#cfn-glue-devendpoint-publickey
- ``p_PublicKeys``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-devendpoint.html#cfn-glue-devendpoint-publickeys
- ``p_SecurityConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-devendpoint.html#cfn-glue-devendpoint-securityconfiguration
- ``p_SecurityGroupIds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-devendpoint.html#cfn-glue-devendpoint-securitygroupids
- ``p_SubnetId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-devendpoint.html#cfn-glue-devendpoint-subnetid
- ``p_WorkerType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-devendpoint.html#cfn-glue-devendpoint-workertype
- ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-devendpoint.html#cfn-glue-devendpoint-tags
"""
AWS_OBJECT_TYPE = "AWS::Glue::DevEndpoint"
rp_RoleArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "RoleArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-devendpoint.html#cfn-glue-devendpoint-rolearn"""
p_Arguments: dict = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(dict)),
metadata={AttrMeta.PROPERTY_NAME: "Arguments"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-devendpoint.html#cfn-glue-devendpoint-arguments"""
p_EndpointName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "EndpointName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-devendpoint.html#cfn-glue-devendpoint-endpointname"""
p_ExtraJarsS3Path: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "ExtraJarsS3Path"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-devendpoint.html#cfn-glue-devendpoint-extrajarss3path"""
p_ExtraPythonLibsS3Path: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "ExtraPythonLibsS3Path"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-devendpoint.html#cfn-glue-devendpoint-extrapythonlibss3path"""
p_GlueVersion: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "GlueVersion"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-devendpoint.html#cfn-glue-devendpoint-glueversion"""
p_NumberOfNodes: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
metadata={AttrMeta.PROPERTY_NAME: "NumberOfNodes"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-devendpoint.html#cfn-glue-devendpoint-numberofnodes"""
p_NumberOfWorkers: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
metadata={AttrMeta.PROPERTY_NAME: "NumberOfWorkers"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-devendpoint.html#cfn-glue-devendpoint-numberofworkers"""
p_PublicKey: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "PublicKey"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-devendpoint.html#cfn-glue-devendpoint-publickey"""
p_PublicKeys: typing.List[TypeHint.intrinsic_str] = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "PublicKeys"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-devendpoint.html#cfn-glue-devendpoint-publickeys"""
p_SecurityConfiguration: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "SecurityConfiguration"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-devendpoint.html#cfn-glue-devendpoint-securityconfiguration"""
p_SecurityGroupIds: typing.List[TypeHint.intrinsic_str] = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "SecurityGroupIds"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-devendpoint.html#cfn-glue-devendpoint-securitygroupids"""
p_SubnetId: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "SubnetId"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-devendpoint.html#cfn-glue-devendpoint-subnetid"""
p_WorkerType: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "WorkerType"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-devendpoint.html#cfn-glue-devendpoint-workertype"""
p_Tags: dict = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(dict)),
metadata={AttrMeta.PROPERTY_NAME: "Tags"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-devendpoint.html#cfn-glue-devendpoint-tags"""
@attr.s
class SchemaVersionMetadata(Resource):
"""
AWS Object Type = "AWS::Glue::SchemaVersionMetadata"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-schemaversionmetadata.html
Property Document:
- ``rp_Key``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-schemaversionmetadata.html#cfn-glue-schemaversionmetadata-key
- ``rp_SchemaVersionId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-schemaversionmetadata.html#cfn-glue-schemaversionmetadata-schemaversionid
- ``rp_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-schemaversionmetadata.html#cfn-glue-schemaversionmetadata-value
"""
AWS_OBJECT_TYPE = "AWS::Glue::SchemaVersionMetadata"
rp_Key: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Key"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-schemaversionmetadata.html#cfn-glue-schemaversionmetadata-key"""
rp_SchemaVersionId: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "SchemaVersionId"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-schemaversionmetadata.html#cfn-glue-schemaversionmetadata-schemaversionid"""
rp_Value: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Value"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-schemaversionmetadata.html#cfn-glue-schemaversionmetadata-value"""
@attr.s
class Crawler(Resource):
"""
AWS Object Type = "AWS::Glue::Crawler"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-crawler.html
Property Document:
- ``rp_Role``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-crawler.html#cfn-glue-crawler-role
- ``rp_Targets``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-crawler.html#cfn-glue-crawler-targets
- ``p_Classifiers``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-crawler.html#cfn-glue-crawler-classifiers
- ``p_Configuration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-crawler.html#cfn-glue-crawler-configuration
- ``p_CrawlerSecurityConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-crawler.html#cfn-glue-crawler-crawlersecurityconfiguration
- ``p_DatabaseName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-crawler.html#cfn-glue-crawler-databasename
- ``p_Description``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-crawler.html#cfn-glue-crawler-description
- ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-crawler.html#cfn-glue-crawler-name
- ``p_Schedule``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-crawler.html#cfn-glue-crawler-schedule
- ``p_SchemaChangePolicy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-crawler.html#cfn-glue-crawler-schemachangepolicy
- ``p_TablePrefix``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-crawler.html#cfn-glue-crawler-tableprefix
- ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-crawler.html#cfn-glue-crawler-tags
"""
AWS_OBJECT_TYPE = "AWS::Glue::Crawler"
rp_Role: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Role"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-crawler.html#cfn-glue-crawler-role"""
rp_Targets: typing.Union['CrawlerTargets', dict] = attr.ib(
default=None,
converter=CrawlerTargets.from_dict,
validator=attr.validators.instance_of(CrawlerTargets),
metadata={AttrMeta.PROPERTY_NAME: "Targets"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-crawler.html#cfn-glue-crawler-targets"""
p_Classifiers: typing.List[TypeHint.intrinsic_str] = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "Classifiers"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-crawler.html#cfn-glue-crawler-classifiers"""
p_Configuration: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Configuration"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-crawler.html#cfn-glue-crawler-configuration"""
p_CrawlerSecurityConfiguration: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "CrawlerSecurityConfiguration"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-crawler.html#cfn-glue-crawler-crawlersecurityconfiguration"""
p_DatabaseName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "DatabaseName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-crawler.html#cfn-glue-crawler-databasename"""
p_Description: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Description"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-crawler.html#cfn-glue-crawler-description"""
p_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-crawler.html#cfn-glue-crawler-name"""
p_Schedule: typing.Union['CrawlerSchedule', dict] = attr.ib(
default=None,
converter=CrawlerSchedule.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(CrawlerSchedule)),
metadata={AttrMeta.PROPERTY_NAME: "Schedule"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-crawler.html#cfn-glue-crawler-schedule"""
p_SchemaChangePolicy: typing.Union['CrawlerSchemaChangePolicy', dict] = attr.ib(
default=None,
converter=CrawlerSchemaChangePolicy.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(CrawlerSchemaChangePolicy)),
metadata={AttrMeta.PROPERTY_NAME: "SchemaChangePolicy"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-crawler.html#cfn-glue-crawler-schemachangepolicy"""
p_TablePrefix: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "TablePrefix"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-crawler.html#cfn-glue-crawler-tableprefix"""
p_Tags: dict = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(dict)),
metadata={AttrMeta.PROPERTY_NAME: "Tags"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-crawler.html#cfn-glue-crawler-tags"""
@attr.s
class MLTransform(Resource):
"""
AWS Object Type = "AWS::Glue::MLTransform"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-mltransform.html
Property Document:
- ``rp_InputRecordTables``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-mltransform.html#cfn-glue-mltransform-inputrecordtables
- ``rp_Role``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-mltransform.html#cfn-glue-mltransform-role
- ``rp_TransformParameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-mltransform.html#cfn-glue-mltransform-transformparameters
- ``p_Description``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-mltransform.html#cfn-glue-mltransform-description
- ``p_GlueVersion``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-mltransform.html#cfn-glue-mltransform-glueversion
- ``p_MaxCapacity``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-mltransform.html#cfn-glue-mltransform-maxcapacity
- ``p_MaxRetries``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-mltransform.html#cfn-glue-mltransform-maxretries
- ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-mltransform.html#cfn-glue-mltransform-name
- ``p_NumberOfWorkers``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-mltransform.html#cfn-glue-mltransform-numberofworkers
- ``p_Timeout``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-mltransform.html#cfn-glue-mltransform-timeout
- ``p_TransformEncryption``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-mltransform.html#cfn-glue-mltransform-transformencryption
- ``p_WorkerType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-mltransform.html#cfn-glue-mltransform-workertype
- ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-mltransform.html#cfn-glue-mltransform-tags
"""
AWS_OBJECT_TYPE = "AWS::Glue::MLTransform"
rp_InputRecordTables: typing.Union['MLTransformInputRecordTables', dict] = attr.ib(
default=None,
converter=MLTransformInputRecordTables.from_dict,
validator=attr.validators.instance_of(MLTransformInputRecordTables),
metadata={AttrMeta.PROPERTY_NAME: "InputRecordTables"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-mltransform.html#cfn-glue-mltransform-inputrecordtables"""
rp_Role: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Role"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-mltransform.html#cfn-glue-mltransform-role"""
rp_TransformParameters: typing.Union['MLTransformTransformParameters', dict] = attr.ib(
default=None,
converter=MLTransformTransformParameters.from_dict,
validator=attr.validators.instance_of(MLTransformTransformParameters),
metadata={AttrMeta.PROPERTY_NAME: "TransformParameters"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-mltransform.html#cfn-glue-mltransform-transformparameters"""
p_Description: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Description"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-mltransform.html#cfn-glue-mltransform-description"""
p_GlueVersion: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "GlueVersion"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-mltransform.html#cfn-glue-mltransform-glueversion"""
p_MaxCapacity: float = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(float)),
metadata={AttrMeta.PROPERTY_NAME: "MaxCapacity"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-mltransform.html#cfn-glue-mltransform-maxcapacity"""
p_MaxRetries: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
metadata={AttrMeta.PROPERTY_NAME: "MaxRetries"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-mltransform.html#cfn-glue-mltransform-maxretries"""
p_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-mltransform.html#cfn-glue-mltransform-name"""
p_NumberOfWorkers: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
metadata={AttrMeta.PROPERTY_NAME: "NumberOfWorkers"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-mltransform.html#cfn-glue-mltransform-numberofworkers"""
p_Timeout: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
metadata={AttrMeta.PROPERTY_NAME: "Timeout"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-mltransform.html#cfn-glue-mltransform-timeout"""
p_TransformEncryption: typing.Union['MLTransformTransformEncryption', dict] = attr.ib(
default=None,
converter=MLTransformTransformEncryption.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(MLTransformTransformEncryption)),
metadata={AttrMeta.PROPERTY_NAME: "TransformEncryption"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-mltransform.html#cfn-glue-mltransform-transformencryption"""
p_WorkerType: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "WorkerType"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-mltransform.html#cfn-glue-mltransform-workertype"""
p_Tags: dict = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(dict)),
metadata={AttrMeta.PROPERTY_NAME: "Tags"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-mltransform.html#cfn-glue-mltransform-tags"""
@attr.s
class Classifier(Resource):
"""
AWS Object Type = "AWS::Glue::Classifier"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-classifier.html
Property Document:
- ``p_CsvClassifier``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-classifier.html#cfn-glue-classifier-csvclassifier
- ``p_GrokClassifier``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-classifier.html#cfn-glue-classifier-grokclassifier
- ``p_JsonClassifier``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-classifier.html#cfn-glue-classifier-jsonclassifier
- ``p_XMLClassifier``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-classifier.html#cfn-glue-classifier-xmlclassifier
"""
AWS_OBJECT_TYPE = "AWS::Glue::Classifier"
p_CsvClassifier: typing.Union['ClassifierCsvClassifier', dict] = attr.ib(
default=None,
converter=ClassifierCsvClassifier.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(ClassifierCsvClassifier)),
metadata={AttrMeta.PROPERTY_NAME: "CsvClassifier"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-classifier.html#cfn-glue-classifier-csvclassifier"""
p_GrokClassifier: typing.Union['ClassifierGrokClassifier', dict] = attr.ib(
default=None,
converter=ClassifierGrokClassifier.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(ClassifierGrokClassifier)),
metadata={AttrMeta.PROPERTY_NAME: "GrokClassifier"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-classifier.html#cfn-glue-classifier-grokclassifier"""
p_JsonClassifier: typing.Union['ClassifierJsonClassifier', dict] = attr.ib(
default=None,
converter=ClassifierJsonClassifier.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(ClassifierJsonClassifier)),
metadata={AttrMeta.PROPERTY_NAME: "JsonClassifier"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-classifier.html#cfn-glue-classifier-jsonclassifier"""
p_XMLClassifier: typing.Union['ClassifierXMLClassifier', dict] = attr.ib(
default=None,
converter=ClassifierXMLClassifier.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(ClassifierXMLClassifier)),
metadata={AttrMeta.PROPERTY_NAME: "XMLClassifier"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-classifier.html#cfn-glue-classifier-xmlclassifier"""
@attr.s
class Schema(Resource):
"""
AWS Object Type = "AWS::Glue::Schema"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-schema.html
Property Document:
- ``rp_Compatibility``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-schema.html#cfn-glue-schema-compatibility
- ``rp_DataFormat``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-schema.html#cfn-glue-schema-dataformat
- ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-schema.html#cfn-glue-schema-name
- ``rp_SchemaDefinition``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-schema.html#cfn-glue-schema-schemadefinition
- ``p_CheckpointVersion``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-schema.html#cfn-glue-schema-checkpointversion
- ``p_Description``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-schema.html#cfn-glue-schema-description
- ``p_Registry``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-schema.html#cfn-glue-schema-registry
- ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-schema.html#cfn-glue-schema-tags
"""
AWS_OBJECT_TYPE = "AWS::Glue::Schema"
rp_Compatibility: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Compatibility"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-schema.html#cfn-glue-schema-compatibility"""
rp_DataFormat: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "DataFormat"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-schema.html#cfn-glue-schema-dataformat"""
rp_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-schema.html#cfn-glue-schema-name"""
rp_SchemaDefinition: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "SchemaDefinition"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-schema.html#cfn-glue-schema-schemadefinition"""
p_CheckpointVersion: typing.Union['SchemaSchemaVersion', dict] = attr.ib(
default=None,
converter=SchemaSchemaVersion.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(SchemaSchemaVersion)),
metadata={AttrMeta.PROPERTY_NAME: "CheckpointVersion"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-schema.html#cfn-glue-schema-checkpointversion"""
p_Description: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Description"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-schema.html#cfn-glue-schema-description"""
p_Registry: typing.Union['SchemaRegistry', dict] = attr.ib(
default=None,
converter=SchemaRegistry.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(SchemaRegistry)),
metadata={AttrMeta.PROPERTY_NAME: "Registry"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-schema.html#cfn-glue-schema-registry"""
p_Tags: typing.List[typing.Union[Tag, dict]] = attr.ib(
default=None,
converter=Tag.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(Tag), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "Tags"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-schema.html#cfn-glue-schema-tags"""
@property
def rv_Arn(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-schema.html#aws-resource-glue-schema-return-values"""
return GetAtt(resource=self, attr_name="Arn")
@property
def rv_InitialSchemaVersionId(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-schema.html#aws-resource-glue-schema-return-values"""
return GetAtt(resource=self, attr_name="InitialSchemaVersionId")
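# Illustrative sketch: unlike the plain-dict Tags on Job or Crawler, p_Tags
# here is converted through ``Tag.from_list``; the ``p_Key``/``p_Value`` field
# names on Tag are assumed. The ``rv_`` properties return Fn::GetAtt
# references usable from other resources.
def _example_schema():
    schema = Schema(
        "MySchema",
        rp_Compatibility="BACKWARD",
        rp_DataFormat="AVRO",
        rp_Name="my-schema",
        rp_SchemaDefinition='{"type": "record", "name": "r", "fields": []}',
        p_Tags=[dict(p_Key="team", p_Value="data-platform")],
    )
    return schema.rv_Arn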
@attr.s
class Table(Resource):
"""
AWS Object Type = "AWS::Glue::Table"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-table.html
Property Document:
- ``rp_CatalogId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-table.html#cfn-glue-table-catalogid
- ``rp_DatabaseName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-table.html#cfn-glue-table-databasename
- ``rp_TableInput``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-table.html#cfn-glue-table-tableinput
"""
AWS_OBJECT_TYPE = "AWS::Glue::Table"
rp_CatalogId: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "CatalogId"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-table.html#cfn-glue-table-catalogid"""
rp_DatabaseName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "DatabaseName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-table.html#cfn-glue-table-databasename"""
rp_TableInput: typing.Union['TableTableInput', dict] = attr.ib(
default=None,
converter=TableTableInput.from_dict,
validator=attr.validators.instance_of(TableTableInput),
metadata={AttrMeta.PROPERTY_NAME: "TableInput"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-table.html#cfn-glue-table-tableinput"""
@attr.s
class Connection(Resource):
"""
AWS Object Type = "AWS::Glue::Connection"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-connection.html
Property Document:
- ``rp_CatalogId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-connection.html#cfn-glue-connection-catalogid
- ``rp_ConnectionInput``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-connection.html#cfn-glue-connection-connectioninput
"""
AWS_OBJECT_TYPE = "AWS::Glue::Connection"
rp_CatalogId: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "CatalogId"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-connection.html#cfn-glue-connection-catalogid"""
rp_ConnectionInput: typing.Union['ConnectionConnectionInput', dict] = attr.ib(
default=None,
converter=ConnectionConnectionInput.from_dict,
validator=attr.validators.instance_of(ConnectionConnectionInput),
metadata={AttrMeta.PROPERTY_NAME: "ConnectionInput"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-connection.html#cfn-glue-connection-connectioninput"""
@attr.s
class Partition(Resource):
"""
AWS Object Type = "AWS::Glue::Partition"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-partition.html
Property Document:
- ``rp_CatalogId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-partition.html#cfn-glue-partition-catalogid
- ``rp_DatabaseName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-partition.html#cfn-glue-partition-databasename
- ``rp_PartitionInput``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-partition.html#cfn-glue-partition-partitioninput
- ``rp_TableName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-partition.html#cfn-glue-partition-tablename
"""
AWS_OBJECT_TYPE = "AWS::Glue::Partition"
rp_CatalogId: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "CatalogId"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-partition.html#cfn-glue-partition-catalogid"""
rp_DatabaseName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "DatabaseName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-partition.html#cfn-glue-partition-databasename"""
rp_PartitionInput: typing.Union['PartitionPartitionInput', dict] = attr.ib(
default=None,
converter=PartitionPartitionInput.from_dict,
validator=attr.validators.instance_of(PartitionPartitionInput),
metadata={AttrMeta.PROPERTY_NAME: "PartitionInput"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-partition.html#cfn-glue-partition-partitioninput"""
rp_TableName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "TableName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-partition.html#cfn-glue-partition-tablename"""
@attr.s
class Registry(Resource):
"""
AWS Object Type = "AWS::Glue::Registry"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-registry.html
Property Document:
- ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-registry.html#cfn-glue-registry-name
- ``p_Description``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-registry.html#cfn-glue-registry-description
- ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-registry.html#cfn-glue-registry-tags
"""
AWS_OBJECT_TYPE = "AWS::Glue::Registry"
rp_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-registry.html#cfn-glue-registry-name"""
p_Description: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Description"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-registry.html#cfn-glue-registry-description"""
p_Tags: typing.List[typing.Union[Tag, dict]] = attr.ib(
default=None,
converter=Tag.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(Tag), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "Tags"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-registry.html#cfn-glue-registry-tags"""
@property
def rv_Arn(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-registry.html#aws-resource-glue-registry-return-values"""
return GetAtt(resource=self, attr_name="Arn")
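# Illustrative sketch: Registry requires only a name; rv_Arn exposes the
# registry ARN as a Fn::GetAtt reference.
def _example_registry():
    registry = Registry(
        "MyRegistry",
        rp_Name="my-registry",
        p_Description="Schemas for the data platform.",
    )
    return registry.rv_Arn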
@attr.s
class Trigger(Resource):
"""
AWS Object Type = "AWS::Glue::Trigger"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-trigger.html
Property Document:
- ``rp_Actions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-trigger.html#cfn-glue-trigger-actions
- ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-trigger.html#cfn-glue-trigger-type
- ``p_Description``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-trigger.html#cfn-glue-trigger-description
- ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-trigger.html#cfn-glue-trigger-name
- ``p_Predicate``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-trigger.html#cfn-glue-trigger-predicate
- ``p_Schedule``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-trigger.html#cfn-glue-trigger-schedule
- ``p_StartOnCreation``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-trigger.html#cfn-glue-trigger-startoncreation
- ``p_WorkflowName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-trigger.html#cfn-glue-trigger-workflowname
- ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-trigger.html#cfn-glue-trigger-tags
"""
AWS_OBJECT_TYPE = "AWS::Glue::Trigger"
rp_Actions: typing.List[typing.Union['TriggerAction', dict]] = attr.ib(
default=None,
converter=TriggerAction.from_list,
validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TriggerAction), iterable_validator=attr.validators.instance_of(list)),
metadata={AttrMeta.PROPERTY_NAME: "Actions"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-trigger.html#cfn-glue-trigger-actions"""
rp_Type: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Type"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-trigger.html#cfn-glue-trigger-type"""
p_Description: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Description"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-trigger.html#cfn-glue-trigger-description"""
p_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-trigger.html#cfn-glue-trigger-name"""
p_Predicate: typing.Union['TriggerPredicate', dict] = attr.ib(
default=None,
converter=TriggerPredicate.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(TriggerPredicate)),
metadata={AttrMeta.PROPERTY_NAME: "Predicate"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-trigger.html#cfn-glue-trigger-predicate"""
p_Schedule: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Schedule"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-trigger.html#cfn-glue-trigger-schedule"""
p_StartOnCreation: bool = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(bool)),
metadata={AttrMeta.PROPERTY_NAME: "StartOnCreation"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-trigger.html#cfn-glue-trigger-startoncreation"""
p_WorkflowName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "WorkflowName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-trigger.html#cfn-glue-trigger-workflowname"""
p_Tags: dict = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(dict)),
metadata={AttrMeta.PROPERTY_NAME: "Tags"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-trigger.html#cfn-glue-trigger-tags"""
@attr.s
class SecurityConfiguration(Resource):
"""
AWS Object Type = "AWS::Glue::SecurityConfiguration"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-securityconfiguration.html
Property Document:
- ``rp_EncryptionConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-securityconfiguration.html#cfn-glue-securityconfiguration-encryptionconfiguration
- ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-securityconfiguration.html#cfn-glue-securityconfiguration-name
"""
AWS_OBJECT_TYPE = "AWS::Glue::SecurityConfiguration"
rp_EncryptionConfiguration: typing.Union['SecurityConfigurationEncryptionConfiguration', dict] = attr.ib(
default=None,
converter=SecurityConfigurationEncryptionConfiguration.from_dict,
validator=attr.validators.instance_of(SecurityConfigurationEncryptionConfiguration),
metadata={AttrMeta.PROPERTY_NAME: "EncryptionConfiguration"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-securityconfiguration.html#cfn-glue-securityconfiguration-encryptionconfiguration"""
rp_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-glue-securityconfiguration.html#cfn-glue-securityconfiguration-name"""
[trailing dataset columns for the file above (avg_line_length 63.358171, max_line_length 294, alphanum_fraction 0.761984, plus the remaining qsc_* quality-signal values) omitted]
1bf0ca336ca50628ba7c13df344dc98dac687ce7
| 3,871
|
py
|
Python
|
tests/test_phylogeny_metrics.py
|
alackles/alife-std-dev-python
|
9d2beb21fb7bb10a6481a9154cf0b71f98885b4e
|
[
"MIT"
] | null | null | null |
tests/test_phylogeny_metrics.py
|
alackles/alife-std-dev-python
|
9d2beb21fb7bb10a6481a9154cf0b71f98885b4e
|
[
"MIT"
] | 10
|
2019-03-04T16:28:05.000Z
|
2019-03-15T20:32:03.000Z
|
tests/test_phylogeny_metrics.py
|
alackles/alife-std-dev-python
|
9d2beb21fb7bb10a6481a9154cf0b71f98885b4e
|
[
"MIT"
] | 2
|
2019-03-04T18:44:47.000Z
|
2021-11-04T16:35:34.000Z
|
import ALifeStdDev.phylogeny as phylodev
import pytest
def test_get_asexual_lineage_length():
toy_lineage_fname = "example_data/example-standard-toy-asexual-lineage.csv"
lineage = phylodev.load_phylogeny_to_networkx(toy_lineage_fname)
length = phylodev.get_asexual_lineage_length(lineage)
assert length == 8
def test_get_asexual_lineage_num_discrete_state_changes():
toy_lineage_fname = "example_data/example-standard-toy-asexual-lineage.csv"
lineage = phylodev.load_phylogeny_to_networkx(toy_lineage_fname)
assert 4 == phylodev.get_asexual_lineage_num_discrete_state_changes(lineage, attribute_list=["genotype"])
assert 4 == phylodev.get_asexual_lineage_num_discrete_state_changes(lineage, attribute_list=["genotype","trait_a"])
assert 3 == phylodev.get_asexual_lineage_num_discrete_state_changes(lineage, attribute_list=["trait_a"])
assert 2 == phylodev.get_asexual_lineage_num_discrete_state_changes(lineage, attribute_list=["trait_b"])
with pytest.raises(Exception):
phylodev.get_asexual_lineage_num_discrete_state_changes(lineage, attribute_list=["garbage_attribute_that_nothing_should_have"])
def test_get_asexual_lineage_num_discrete_unique_states():
toy_lineage_fname = "example_data/example-standard-toy-asexual-lineage.csv"
lineage = phylodev.load_phylogeny_to_networkx(toy_lineage_fname)
assert 4 == phylodev.get_asexual_lineage_num_discrete_unique_states(lineage, attribute_list=["genotype"])
assert 2 == phylodev.get_asexual_lineage_num_discrete_unique_states(lineage, attribute_list=["trait_b"])
with pytest.raises(Exception):
phylodev.get_asexual_lineage_num_discrete_unique_states(lineage, attribute_list=["garbage_attribute_that_nothing_should_have"])
def test_get_asexual_lineage_mutation_accumulation():
toy_lineage_fname = "example_data/example-standard-toy-asexual-lineage.csv"
lineage = phylodev.load_phylogeny_to_networkx(toy_lineage_fname)
mut_dist = phylodev.get_asexual_lineage_mutation_accumulation(lineage, mutation_attributes=["sub_mut_cnt","reverse_mut_cnt"])
assert mut_dist["sub_mut_cnt"] == 2
assert mut_dist["reverse_mut_cnt"] == 1
with pytest.raises(Exception):
mut_dist = phylodev.get_asexual_lineage_mutation_accumulation(lineage, mutation_attributes=["garbage_attribute_that_nothing_should_have"])
def test_get_mrca_tree_depth():
single_root_fname = "example_data/example-standard-toy-asexual-phylogeny.csv"
sroot = phylodev.load_phylogeny_to_networkx(single_root_fname)
depth = phylodev.get_mrca_tree_depth_asexual(sroot)
assert depth == 0
depth = phylodev.get_mrca_tree_depth_asexual(sroot, [3,4,5])
assert depth == 0
depth = phylodev.get_mrca_tree_depth_asexual(sroot, [3,4])
assert depth == 1
depth = phylodev.get_mrca_tree_depth_asexual(sroot, [5,0])
assert depth == 0
depth = phylodev.get_mrca_tree_depth_asexual(sroot, [0,1,2,3,4,5])
assert depth == 0
depth = phylodev.get_mrca_tree_depth_asexual(sroot, [2])
assert depth == 1
depth = phylodev.get_mrca_tree_depth_asexual(sroot, [5])
assert depth == 2
def test_calc_phylogenetic_diversity_asexual():
single_root_fname = "example_data/example-standard-toy-asexual-phylogeny-not-pruned.csv"
sroot = phylodev.load_phylogeny_to_networkx(single_root_fname)
diversity = phylodev.calc_phylogenetic_diversity_asexual(sroot, [3,4,5])
assert diversity == 6
diversity = phylodev.calc_phylogenetic_diversity_asexual(sroot, [3,4,5,8])
assert diversity == 7
if __name__ == "__main__":
test_get_asexual_lineage_length()
test_get_asexual_lineage_num_discrete_state_changes()
test_get_asexual_lineage_num_discrete_unique_states()
test_get_asexual_lineage_mutation_accumulation()
test_get_mrca_tree_depth()
test_calc_phylogenetic_diversity_asexual()
| avg_line_length: 46.638554 | max_line_length: 146 | alphanum_fraction: 0.795918 | …
hexsha: 400c1768f6579eed3113869d352130bd9e2c219d | size: 47 | ext: py | lang: Python | path: src/__init__.py | repo: students124/TugasSearch | head: 8ec87d79c5dc933180795a1f4b811c6da9612526 | licenses: ["MIT"] | stars/issues/forks: null |
from .Graph import Graph
# from .GUI import GUI
| avg_line_length: 23.5 | max_line_length: 24 | alphanum_fraction: 0.765957 | …
hexsha: 400e1b25a472ec86f677953a0fba861de4bb85fe | size: 3,850 | ext: py | lang: Python | path: tests/test_transformer/test_dropout/test_dropout_transormer.py | repo: uTensor/utensor_cgen | head: eccd6859028d0b6a350dced25ea72ff02faaf9ad | licenses: ["Apache-2.0"] | stars: 49 (2018-01-06 to 2021-09-03) | issues: 101 (2018-01-16 to 2021-11-10) | forks: 32 (2018-02-15 to 2020-11-26) |
import pytest
import tensorflow.compat.v1 as tf
from utensor_cgen.frontend.tensorflow import GraphDefParser
from utensor_cgen.transformer.ns_transformer import (DropoutTransformer,
DropoutTransformerV2)
def test_dropout_trans_1_1(droput_graph_tuple):
(graph_def,
(rate_name, dropout_output_name),
output_nodes) = droput_graph_tuple
ugraph = GraphDefParser(config={}).parse(graph_def, output_nodes=output_nodes)
transformer = DropoutTransformer()
assert transformer.prune_graph
new_ugraph = transformer.transform(ugraph)
for op in new_ugraph.ops_info.values():
assert op.ugraph
out_op = new_ugraph.ops_info[output_nodes[0]]
assert set([str(op.name) for op in out_op.input_nodes]) == set(['x', 'bias'])
# all dropout nodes should be gone
graph_1 = tf.Graph()
graph_2 = tf.Graph()
with graph_1.as_default():
tf.import_graph_def(ugraph.graph_def, name='')
with graph_2.as_default():
tf.import_graph_def(new_ugraph.graph_def, name='')
with tf.Session(graph=graph_1):
rate = graph_1.get_tensor_by_name(rate_name)
dropout_output = graph_1.get_tensor_by_name(dropout_output_name)
output = graph_1.get_tensor_by_name(output_nodes[0]+":0")
# test the dropout ops are gone
assert rate.op.name not in new_ugraph.ops_info
assert dropout_output.op.name not in new_ugraph.ops_info
output_1 = output.eval({rate: 0.0})
with tf.Session(graph=graph_2):
output = graph_2.get_tensor_by_name(output_nodes[0]+":0")
output_2 = output.eval()
# expecting the same outputs with keep_prob == 1.0
assert (output_1 == output_2).all()
@pytest.mark.deprecated
def test_dropout_trans_1_2(droput_graph_tuple):
(graph_def,
(keep_prob_name, dropout_output_name),
output_nodes) = droput_graph_tuple
ugraph = GraphDefParser(config={}).parse(graph_def, output_nodes=output_nodes)
transformer = DropoutTransformerV2()
assert transformer.prune_graph
new_ugraph = transformer.transform(ugraph)
for op in new_ugraph.ops_info.values():
assert op.ugraph
out_op = new_ugraph.ops_info[output_nodes[0]]
assert set([str(op.name) for op in out_op.input_nodes]) == set(['x', 'bias'])
# all dropout nodes should be gone
graph_1 = tf.Graph()
graph_2 = tf.Graph()
with graph_1.as_default():
tf.import_graph_def(ugraph.graph_def, name='')
with graph_2.as_default():
tf.import_graph_def(new_ugraph.graph_def, name='')
with tf.Session(graph=graph_1):
keep_prob = graph_1.get_tensor_by_name(keep_prob_name)
dropout_output = graph_1.get_tensor_by_name(dropout_output_name)
output = graph_1.get_tensor_by_name(output_nodes[0]+":0")
# test the dropout ops are gone
assert keep_prob.op.name not in new_ugraph.ops_info
assert dropout_output.op.name not in new_ugraph.ops_info
output_1 = output.eval({keep_prob:1.0})
with tf.Session(graph=graph_2):
output = graph_2.get_tensor_by_name(output_nodes[0]+":0")
output_2 = output.eval()
# expecting the same outputs with keep_prob == 1.0
assert (output_1 == output_2).all()
@pytest.mark.deprecated
def test_dropout_trans_2(dropout_graph_tuple2):
graph_def, output_nodes = dropout_graph_tuple2
ugraph = GraphDefParser(config={}).parse(graph_def, output_nodes=output_nodes)
trans = DropoutTransformerV2()
new_ugraph = trans.transform(ugraph)
assert len(new_ugraph.ops_info) == 1
assert 'x' in new_ugraph.ops_info
@pytest.mark.deprecated
def test_dropout_vgg(vgg_ugraph):
trans = DropoutTransformerV2()
new_ugraph = trans.transform(vgg_ugraph)
for op_name in new_ugraph.ops_info:
assert not op_name.startswith('dropout')
| avg_line_length: 42.307692 | max_line_length: 82 | alphanum_fraction: 0.708831 | …
hexsha: 4021b880523b14fd4fe118245bbab0f2c0ce9d31 | size: 32 | ext: py | lang: Python | path: app/services/game_data/__init__.py | repo: somespecialone/clever-inspect | head: 8735e0b445c8e7e9b83c627d4a5fbed1428c1891 | licenses: ["MIT"] | stars: 1 (2022-03-12) | issues/forks: null |
from .game_data import GameData
| avg_line_length: 16 | max_line_length: 31 | alphanum_fraction: 0.84375 | …
hexsha: 4024f03f3c9deaa2cb223b9ddcd6a3aefc88cf73 | size: 49 | ext: py | lang: Python | path: fintech_ibkr/__init__.py | repo: ls4351/testapp | head: 83238af14e07e2dcc7476244a00bbc05b6f960f3 | licenses: ["MIT"] | stars/issues/forks: null |
from fintech_ibkr.synchronous_functions import *
| avg_line_length: 24.5 | max_line_length: 48 | alphanum_fraction: 0.877551 | …
hexsha: 402ad1449bf7ecbb0c9e424928ea439978226a3a | size: 13 | ext: py | lang: Python | path: db/utils/__init__.py | repo: cuichuan123456/transform-culane | head: 5e1de763100ae53b1c5c66cf168c09999cf75139 | licenses: ["BSD-3-Clause"] | stars: 540 (2020-11-10 to 2022-03-30) | issues: 83 (2020-11-14 to 2022-03-22) | forks: 122 (2020-11-12 to 2022-03-23) |
# 2020 07 09
| avg_line_length: 6.5 | max_line_length: 12 | alphanum_fraction: 0.615385 | …
hexsha: 4035346bef3c7b4eacc542c54237dbbc1c19a731 | size: 102 | ext: py | lang: Python | path: pyhive/__init__.py | repo: mayurinehate/PyHive | head: a7e106262b1cd61b9be869948574e23c96c16bc9 | licenses: ["Apache-2.0"] | stars/issues/forks: null |
from __future__ import absolute_import
from __future__ import unicode_literals
__version__ = '0.6.12'
| avg_line_length: 25.5 | max_line_length: 39 | alphanum_fraction: 0.843137 | …
hexsha: 40c9ce98e8caea912ddf2f48445b662f87b9a913 | size: 21,830 | ext: py | lang: Python | path: tests/python/relax/test_transform_fuse_tir.py | repo: psrivas2/relax | head: 4329af78eb1dc4c4ff8a61d3bf39aa4034e9cb2a | licenses: ["Apache-2.0"] | stars: 90 (2021-11-30 to 2022-03-31) | issues: 64 (2021-11-22 to 2022-03-31) | forks: 27 (2021-12-09 to 2022-03-24) |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import pytest
import sys
import tvm
from tvm import topi
from tvm import relax
def _check(mod_before, mod_expected):
mod = relax.transform.FuseTIR()(mod_before)
tvm.ir.assert_structural_equal(mod, mod_expected)
def test_simple():
def before():
bb = relax.BlockBuilder()
x = relax.Var("x", [10, 20], relax.DynTensorType(2, "float32"))
p0 = relax.Var("p0", (), relax.DynTensorType(0, "float32"))
with bb.function("fused_add_exp_squeeze", [x, p0], attrs={"Primitive": True}):
with bb.dataflow():
lv0 = bb.emit_te(topi.add, x, p0)
lv1 = bb.emit_te(topi.exp, lv0)
gv = bb.emit_output(bb.call_te(topi.squeeze, lv1))
bb.emit_func_output(gv)
fused_add_exp_squeeze = bb.get().get_global_var("fused_add_exp_squeeze")
x = relax.Var("x", [10, 20], relax.DynTensorType(2, "float32"))
with bb.function("main", [x, p0]):
with bb.dataflow():
gv = bb.emit_output(relax.Call(fused_add_exp_squeeze, [x, p0]))
bb.emit_func_output(gv)
return bb.get()
def expected():
def fused_add_exp_squeeze(x, p0):
add = topi.add(x, p0)
exp = topi.exp(add)
squeeze = topi.squeeze(exp)
return squeeze
bb = relax.BlockBuilder()
x = relax.Var("x", [10, 20], relax.DynTensorType(2, "float32"))
p0 = relax.Var("p0", (), relax.DynTensorType(0, "float32"))
with bb.function("main", [x, p0]):
with bb.dataflow():
gv = bb.emit_output(bb.call_te(fused_add_exp_squeeze, x, p0))
bb.emit_func_output(gv)
return bb.get()
_check(before(), expected())
def test_conv2d_fuse():
def before(dtype):
bb = relax.BlockBuilder()
tensor_type = relax.DynTensorType(4, dtype)
# Grouped function 1
x = relax.Var("x", (1, 16, 64, 64), tensor_type)
w = relax.Var("w", (16, 16, 3, 3), tensor_type)
p0 = relax.Var("p0", (), relax.DynTensorType(0, dtype))
with bb.function("fused_conv2d_add1_add2", [x, w, p0], attrs={"Primitive": True}):
with bb.dataflow():
lv0 = bb.emit_te(
topi.nn.conv2d,
x,
w,
strides=1,
padding=1,
dilation=1,
primfunc_name_hint="conv2d",
)
lv1 = bb.emit_te(topi.add, p0, lv0, primfunc_name_hint="add1")
gv = bb.emit_output(bb.call_te(topi.add, lv0, lv1, primfunc_name_hint="add2"))
bb.emit_func_output(gv)
# Grouped function 2
x = relax.Var("x", (1, 16, 64, 64), tensor_type)
w = relax.Var("w", (16, 16, 1, 1), tensor_type)
y = relax.Var("y", (1, 16, 64, 64), tensor_type)
with bb.function("fused_conv2d1_add2", [x, w, y], attrs={"Primitive": True}):
with bb.dataflow():
lv0 = bb.emit_te(
topi.nn.conv2d,
x,
w,
strides=1,
padding=0,
dilation=1,
primfunc_name_hint="conv2d1",
)
gv = bb.emit_output(bb.call_te(topi.add, lv0, y, primfunc_name_hint="add2"))
bb.emit_func_output(gv)
# Get the global variables of the grouped functions
mod = bb.get()
fused_conv2d_add1_add2 = mod.get_global_var("fused_conv2d_add1_add2")
fused_conv2d1_add2 = mod.get_global_var("fused_conv2d1_add2")
# Main function
x = relax.Var("x", (1, 16, 64, 64), tensor_type)
w1 = relax.Var("w1", (16, 16, 3, 3), tensor_type)
w2 = relax.Var("w2", (16, 16, 1, 1), tensor_type)
w3 = relax.Var("w3", (16, 16, 3, 3), tensor_type)
with bb.function("main", [x, w1, w2, w3]):
with bb.dataflow():
lv0 = bb.emit_te(topi.add, x, relax.const(1, dtype))
lv1 = bb.emit(relax.Call(fused_conv2d_add1_add2, [lv0, w1, relax.const(1, dtype)]))
lv2 = bb.emit_te(
topi.nn.conv2d,
lv1,
w3,
strides=1,
padding=1,
dilation=1,
)
gv = bb.emit_output(relax.Call(fused_conv2d1_add2, [lv1, w2, lv2]))
bb.emit_func_output(gv)
return bb.get()
def expected(dtype):
def fused_conv2d_add1_add2(x, w, p):
conv = topi.nn.conv2d(x, w, strides=1, padding=1, dilation=1)
add = topi.add(p, conv)
return topi.add(conv, add)
def fused_conv2d1_add2(x, w, p):
conv = topi.nn.conv2d(x, w, strides=1, padding=0, dilation=1)
return topi.add(conv, p)
bb = relax.BlockBuilder()
tensor_type = relax.DynTensorType(4, dtype)
# Main function
x = relax.Var("x", (1, 16, 64, 64), tensor_type)
w1 = relax.Var("w1", (16, 16, 3, 3), tensor_type)
w2 = relax.Var("w2", (16, 16, 1, 1), tensor_type)
w3 = relax.Var("w3", (16, 16, 3, 3), tensor_type)
with bb.function("main", [x, w1, w2, w3]):
with bb.dataflow():
lv0 = bb.emit_te(topi.add, x, relax.const(1, dtype))
lv1 = bb.emit_te(fused_conv2d_add1_add2, lv0, w1, relax.const(1, dtype))
lv2 = bb.emit_te(
topi.nn.conv2d,
lv1,
w3,
strides=1,
padding=1,
dilation=1,
)
gv = bb.emit_output(bb.call_te(fused_conv2d1_add2, lv1, w2, lv2))
bb.emit_func_output(gv)
return bb.get()
_check(before("float32"), expected("float32"))
def test_two_subfunction():
def before():
bb = relax.BlockBuilder()
x1 = relax.Var("x1", [10, 20], relax.DynTensorType(2, "float32"))
with bb.function("fused_exp_squeeze", [x1], attrs={"Primitive": True}):
with bb.dataflow():
lv1 = bb.emit_te(topi.exp, x1)
gv = bb.emit_output(bb.call_te(topi.squeeze, lv1))
bb.emit_func_output(gv)
mod = bb.get()
func_gv = mod.get_global_var("fused_exp_squeeze")
x = relax.Var("x", [10, 20], relax.DynTensorType(2, "float32"))
with bb.function("main", [x]):
with bb.dataflow():
lv = bb.emit(relax.Call(func_gv, [x]))
lv2 = bb.emit(relax.Call(func_gv, [lv]))
gv = bb.emit_output(lv2)
bb.emit_func_output(gv)
return bb.get()
def expected():
def fused_exp_squeeze(x):
exp = topi.exp(x)
squeeze = topi.squeeze(exp)
return squeeze
bb = relax.BlockBuilder()
x = relax.Var("x", [10, 20], relax.DynTensorType(2, "float32"))
with bb.function("main", [x]):
with bb.dataflow():
lv = bb.emit_te(fused_exp_squeeze, x)
lv2 = bb.emit_te(fused_exp_squeeze, lv)
gv = bb.emit_output(lv2)
bb.emit_func_output(gv)
return bb.get()
_check(before(), expected())
def test_fuse_same_primfunc():
def before():
bb = relax.BlockBuilder()
x1 = relax.Var("x1", [10, 20], relax.DynTensorType(2, "float32"))
with bb.function("fused_exp_exp_squeeze", [x1], attrs={"Primitive": True}):
with bb.dataflow():
lv1 = bb.emit_te(topi.exp, x1)
lv2 = bb.emit_te(topi.exp, lv1)
gv = bb.emit_output(bb.call_te(topi.squeeze, lv2))
bb.emit_func_output(gv)
mod = bb.get()
func_gv = mod.get_global_var("fused_exp_exp_squeeze")
x = relax.Var("x", [10, 20], relax.DynTensorType(2, "float32"))
with bb.function("main", [x]):
with bb.dataflow():
lv = bb.emit(relax.Call(func_gv, [x]))
gv = bb.emit_output(lv)
bb.emit_func_output(gv)
return bb.get()
def expected():
def fused_exp_exp_squeeze(x):
exp = topi.exp(x)
exp = topi.exp(exp)
squeeze = topi.squeeze(exp)
return squeeze
bb = relax.BlockBuilder()
x = relax.Var("x", [10, 20], relax.DynTensorType(2, "float32"))
with bb.function("main", [x]):
with bb.dataflow():
lv = bb.emit_te(fused_exp_exp_squeeze, x)
gv = bb.emit_output(lv)
bb.emit_func_output(gv)
return bb.get()
_check(before(), expected())
def test_fuse_with_tuple_as_param():
dyn_tensor_type = relax.DynTensorType(1, "float32")
tuple_type = relax.TupleType([dyn_tensor_type, dyn_tensor_type])
tuple_shape = relax.Tuple([relax.ShapeExpr([10]), relax.ShapeExpr([10])])
def before():
bb = relax.BlockBuilder()
x = relax.Var("x", tuple_shape, tuple_type)
with bb.function("fused_exp_add", [x], attrs={"Primitive": True}):
with bb.dataflow():
lv0 = bb.emit(relax.TupleGetItem(x, 0))
lv1 = bb.emit(relax.TupleGetItem(x, 1))
lv2 = bb.emit_te(topi.exp, lv0)
gv = bb.emit_output(bb.call_te(topi.add, lv2, lv1))
bb.emit_func_output(gv)
mod = bb.get()
func_gv = mod.get_global_var("fused_exp_add")
x = relax.Var("x", tuple_shape, tuple_type)
with bb.function("main", [x]):
with bb.dataflow():
gv = bb.emit_output(relax.Call(func_gv, [x]))
bb.emit_func_output(gv)
return bb.get()
def expected():
def fused_exp_add(x1, x2):
exp = topi.exp(x1)
return topi.add(exp, x2)
bb = relax.BlockBuilder()
dyn_tensor_type = relax.DynTensorType(1, "float32")
tuple_type = relax.TupleType([dyn_tensor_type, dyn_tensor_type])
tuple_shape = relax.Tuple([relax.ShapeExpr([10]), relax.ShapeExpr([10])])
x = relax.Var("x", tuple_shape, tuple_type)
with bb.function("main", [x]):
with bb.dataflow():
lv0 = bb.emit(relax.TupleGetItem(x, 0))
lv1 = bb.emit(relax.TupleGetItem(x, 1))
gv = bb.emit_output(bb.call_te(fused_exp_add, lv0, lv1))
bb.emit_func_output(gv)
return bb.get()
_check(before(), expected())
def test_fuse_with_nested_tuple_as_param():
dyn_tensor_type = relax.DynTensorType(1, "float32")
tuple_type = relax.TupleType(
[dyn_tensor_type, relax.TupleType([dyn_tensor_type, dyn_tensor_type])]
)
shape = relax.ShapeExpr([10])
tuple_shape = relax.Tuple([shape, relax.Tuple([shape, shape])])
def before():
bb = relax.BlockBuilder()
x = relax.Var("x", tuple_shape, tuple_type)
with bb.function("fused_exp_add_add", [x], attrs={"Primitive": True}):
with bb.dataflow():
lv0 = bb.emit(relax.TupleGetItem(x, 0))
lv0_exp = bb.emit_te(topi.exp, lv0)
lv1 = bb.emit(relax.TupleGetItem(x, 1))
lv1_0 = bb.emit(relax.TupleGetItem(lv1, 0))
lv1_1 = bb.emit(relax.TupleGetItem(lv1, 1))
lv2 = bb.emit_te(topi.add, lv1_0, lv1_1)
gv = bb.emit_output(bb.call_te(topi.add, lv0_exp, lv2))
bb.emit_func_output(gv)
mod = bb.get()
func_gv = mod.get_global_var("fused_exp_add_add")
x = relax.Var("x", tuple_shape, tuple_type)
with bb.function("main", [x]):
with bb.dataflow():
gv = bb.emit_output(relax.Call(func_gv, [x]))
bb.emit_func_output(gv)
return bb.get()
def expected():
def fused_exp_add_add(x1, x2, x3):
exp = topi.exp(x1)
add = topi.add(x2, x3)
return topi.add(exp, add)
bb = relax.BlockBuilder()
x = relax.Var("x", tuple_shape, tuple_type)
with bb.function("main", [x]):
with bb.dataflow():
lv0 = bb.emit(relax.TupleGetItem(x, 0))
lv1 = bb.emit(relax.TupleGetItem(x, 1))
lv2 = bb.emit(relax.TupleGetItem(lv1, 0))
lv3 = bb.emit(relax.TupleGetItem(lv1, 1))
gv = bb.emit_output(bb.call_te(fused_exp_add_add, lv0, lv2, lv3))
bb.emit_func_output(gv)
return bb.get()
_check(before(), expected())
def test_fuse_with_call_tir_in_main():
def before():
bb = relax.BlockBuilder()
x1 = relax.Var("x1", [10, 20], relax.DynTensorType(2, "float32"))
with bb.function("fused_exp_squeeze", [x1], attrs={"Primitive": True}):
with bb.dataflow():
lv = bb.emit_te(topi.exp, x1)
gv = bb.emit_output(bb.call_te(topi.squeeze, lv))
bb.emit_func_output(gv)
mod = bb.get()
func_gv = mod.get_global_var("fused_exp_squeeze")
x = relax.Var("x", [10, 20], relax.DynTensorType(2, "float32"))
with bb.function("main", [x]):
with bb.dataflow():
lv0 = bb.emit(relax.Call(func_gv, [x]))
lv1 = bb.emit_te(topi.add, lv0, relax.const(1, "float32"))
gv = bb.emit_output(lv1)
bb.emit_func_output(gv)
return bb.get()
def expected():
def fused_exp_squeeze(x):
exp = topi.exp(x)
squeeze = topi.squeeze(exp)
return squeeze
bb = relax.BlockBuilder()
x = relax.Var("x", [10, 20], relax.DynTensorType(2, "float32"))
with bb.function("main", [x]):
with bb.dataflow():
lv = bb.emit_te(fused_exp_squeeze, x)
lv2 = bb.emit_te(topi.add, lv, relax.const(1, "float32"))
gv = bb.emit_output(lv2)
bb.emit_func_output(gv)
return bb.get()
_check(before(), expected())
def test_fuse_with_const_in_argument():
def before():
bb = relax.BlockBuilder()
x1 = relax.Var("x1", [10, 20], relax.DynTensorType(2, "float32"))
x2 = relax.Var("x2", [], relax.DynTensorType(0, "float32"))
with bb.function("fused_add_exp_squeeze", [x1, x2], attrs={"Primitive": True}):
with bb.dataflow():
lv0 = bb.emit_te(topi.add, x1, x2)
lv1 = bb.emit_te(topi.exp, lv0)
gv = bb.emit_output(bb.call_te(topi.squeeze, lv1))
bb.emit_func_output(gv)
mod = bb.get()
func_gv = mod.get_global_var("fused_add_exp_squeeze")
x = relax.Var("x", [10, 20], relax.DynTensorType(2, "float32"))
with bb.function("main", [x]):
with bb.dataflow():
lv = bb.emit(relax.Call(func_gv, [x, relax.const(1, "float32")]))
gv = bb.emit_output(lv)
bb.emit_func_output(gv)
return bb.get()
def expected():
def fused_add_exp_squeeze(x, y):
add = topi.add(x, y)
exp = topi.exp(add)
squeeze = topi.squeeze(exp)
return squeeze
bb = relax.BlockBuilder()
x = relax.Var("x", [10, 20], relax.DynTensorType(2, "float32"))
with bb.function("main", [x]):
with bb.dataflow():
lv = bb.emit_te(fused_add_exp_squeeze, x, relax.const(1, "float32"))
gv = bb.emit_output(lv)
bb.emit_func_output(gv)
return bb.get()
_check(before(), expected())
def test_fuse_tuple_output():
def before():
bb = relax.BlockBuilder()
x = relax.Var("x", [10, 20], relax.DynTensorType(2, "float32"))
p0 = relax.Var("p0", (), relax.DynTensorType(0, "float32"))
with bb.function("fused_add_exp", [x, p0], attrs={"Primitive": True}):
with bb.dataflow():
gv0 = bb.emit_output(bb.call_te(topi.add, x, p0))
gv1 = bb.emit_output(bb.call_te(topi.exp, gv0))
bb.emit_func_output(relax.Tuple([gv0, gv1]))
fused_add_exp = bb.get().get_global_var("fused_add_exp")
x = relax.Var("x", [10, 20], relax.DynTensorType(2, "float32"))
with bb.function("main", [x, p0]):
with bb.dataflow():
gv = bb.emit_output(relax.Call(fused_add_exp, [x, p0]))
bb.emit_func_output(gv)
return bb.get()
def expected():
def fused_add_exp(x, p0):
add = topi.add(x, p0)
exp = topi.exp(add)
return add, exp
bb = relax.BlockBuilder()
x = relax.Var("x", [10, 20], relax.DynTensorType(2, "float32"))
p0 = relax.Var("p0", (), relax.DynTensorType(0, "float32"))
with bb.function("main", [x, p0]):
with bb.dataflow():
gv = bb.emit_output(bb.call_te(fused_add_exp, x, p0))
bb.emit_func_output(gv)
return bb.get()
_check(before(), expected())
def test_fuse_with_immediate_tuple():
def before():
bb = relax.BlockBuilder()
x = relax.Var("x", [10, 20], relax.DynTensorType(2, "float32"))
y = relax.Var("y", [10, 20], relax.DynTensorType(2, "float32"))
with bb.function("fused_add", [x, y], attrs={"Primitive": True}):
with bb.dataflow():
lv_tuple = bb.emit(relax.Tuple([x, relax.Tuple([x, y])]))
lv_x = bb.emit(relax.TupleGetItem(lv_tuple, 0))
lv0 = bb.emit(relax.TupleGetItem(lv_tuple, 1))
lv_y = bb.emit(relax.TupleGetItem(lv0, 1))
gv = bb.emit_output(bb.call_te(topi.add, lv_x, lv_y))
bb.emit_func_output(gv)
fused_add = bb.get().get_global_var("fused_add")
x = relax.Var("x", [10, 20], relax.DynTensorType(2, "float32"))
y = relax.Var("y", [10, 20], relax.DynTensorType(2, "float32"))
with bb.function("main", [x, y]):
with bb.dataflow():
gv = bb.emit_output(relax.Call(fused_add, [x, y]))
bb.emit_func_output(gv)
return bb.get()
def expected():
bb = relax.BlockBuilder()
x = relax.Var("x", [10, 20], relax.DynTensorType(2, "float32"))
y = relax.Var("y", [10, 20], relax.DynTensorType(2, "float32"))
with bb.function("main", [x, y]):
with bb.dataflow():
gv = bb.emit_output(bb.call_te(topi.add, x, y, primfunc_name_hint="fused_add"))
bb.emit_func_output(gv)
return bb.get()
_check(before(), expected())
def test_fuse_return_partial_result():
def te_argmax_idx_val(val):
from tvm import te
def f_combine(x, y):
lhs = tvm.tir.Select((x[1] >= y[1]), x[0], y[0])
rhs = tvm.tir.Select((x[1] >= y[1]), x[1], y[1])
return lhs, rhs
def f_identity(dtype0: tvm.DataType, dtype1: tvm.DataType):
return tvm.tir.const(-1, dtype0), tvm.te.min_value(dtype1)
argmax = te.comm_reducer(f_combine, f_identity, name="argmax")
m, n = val.shape
k = te.reduce_axis((0, n), "k")
max_idx, max_val = te.compute(
(m,), lambda i: argmax((k.var, val[i, k]), axis=k), name="argmax"
)
return max_idx, max_val
def before():
bb = relax.BlockBuilder()
x = relax.Var("x", [10, 20], relax.DynTensorType(2, "float32"))
offset = relax.Var("offset", [10], relax.DynTensorType(1, "int32"))
with bb.function("fused_argmax_add", [x, offset], attrs={"Primitive": True}):
with bb.dataflow():
lv = bb.emit_te(te_argmax_idx_val, x)
idx = bb.emit(relax.TupleGetItem(lv, 0))
gv = bb.emit_output(bb.call_te(topi.add, idx, offset))
bb.emit_func_output(gv)
mod = bb.get()
func_gv = mod.get_global_var("fused_argmax_add")
x = relax.Var("x", [10, 20], relax.DynTensorType(2, "float32"))
offset = relax.Var("x", [10], relax.DynTensorType(1, "int32"))
with bb.function("main", [x, offset]):
with bb.dataflow():
gv = bb.emit_output(relax.Call(func_gv, [x, offset]))
bb.emit_func_output(gv)
return bb.get()
def expected():
def fused_argmax_add(x, offset):
idx, value = te_argmax_idx_val(x)
idx = topi.add(idx, offset)
return idx
bb = relax.BlockBuilder()
x = relax.Var("x", [10, 20], relax.DynTensorType(2, "float32"))
offset = relax.Var("offset", [10], relax.DynTensorType(1, "int32"))
with bb.function("main", [x, offset]):
with bb.dataflow():
gv = bb.emit_output(bb.call_te(fused_argmax_add, x, offset))
bb.emit_func_output(gv)
return bb.get()
_check(before(), expected())
if __name__ == "__main__":
sys.exit(pytest.main([__file__] + sys.argv[1:]))
| avg_line_length: 37.965217 | max_line_length: 99 | alphanum_fraction: 0.547229 | …
hexsha: 40ca55ae4ebf6727a09c5821a5885f0a918b79a4 | size: 79 | ext: py | lang: Python | path: forecast/pipelines/__init__.py | repo: ADGEfficiency/forecast (stars) / l-leo/forecast (issues, forks) | head: 69d636d4fa081a81c70c18d2a3cb8a60db00b493 | licenses: ["MIT"] | stars: 16 (2018-08-10 to 2021-09-02) | issues: 11 (2019-02-20 to 2022-02-09) | forks: 6 (2019-01-24 to 2021-04-08) |
from .pipelines import *
from .seasonality import *
from .time_series import *
| avg_line_length: 19.75 | max_line_length: 26 | alphanum_fraction: 0.772152 | …
hexsha: dc193008b19b5347f8bd3d76ef7907fdb995b156 | size: 186 | ext: py | lang: Python | path: sanad_customisations/sanad_customisations/doctype/ticketing_and_travel_management/test_ticketing_and_travel_management.py | repo: the-bantoo/sanad-customizations | head: 2413134d5bb6ee0f87ce0e6ac57a9235814e76e9 | licenses: ["MIT"] | stars/issues: null | forks: 1 (2022-01-30) |
# Copyright (c) 2021, Bantoo Accounting Innovations and Contributors
# See license.txt
# import frappe
import unittest
class TestTicketingAndTravelManagement(unittest.TestCase):
pass
| avg_line_length: 20.666667 | max_line_length: 68 | alphanum_fraction: 0.817204 | …
hexsha: 90556bf73579522da9e1e8b38cb279ee8f614c9f | size: 8,422 | ext: py | lang: Python | path: CureIAM/workers.py | repo: gojek/CureIAM (stars) / gojekfarm/CureIAM (issues, forks) | head: 83cdf6ef4d61b563ae8ac69fbf008f8338f6361f | licenses: ["Apache-2.0"] | stars: 17 (2021-11-10 to 2022-03-03) | issues: null | forks: 3 (2021-11-11 to 2021-12-02) |
"""Worker functions.
"""
import logging
from CureIAM import util
_log = logging.getLogger(__name__)
def cloud_worker(audit_key, audit_version, plugin_key, plugin_config,
output_queues):
"""Worker function for cloud plugins.
This function instantiates a plugin object from the
``plugin_config`` dictionary. This function expects the plugin
object to implement a ``read`` method that yields records. This
function calls this ``read`` method to retrieve records and puts
each record into each queue in ``output_queues``.
Arguments:
audit_key (str): Audit key name in configuration.
audit_version (str): Audit version string.
plugin_key (str): Plugin key name in configuration.
plugin_config (dict): Cloud plugin config dictionary.
output_queues (list): List of :class:`multiprocessing.Queue`
objects to write records to.
"""
worker_name = audit_key + '_' + plugin_key
_log.info('cloud_worker: %s: Started', worker_name)
try:
plugin = util.load_plugin(plugin_config)
for record in plugin.read():
record['com'] = util.merge_dicts(record.get('com', {}), {
'audit_key': audit_key,
'audit_version': audit_version,
'origin_key': plugin_key,
'origin_class': type(plugin).__name__,
'origin_worker': worker_name,
'origin_type': 'cloud',
})
for q in output_queues:
q.put(record)
plugin.done()
except Exception as e:
_log.exception('cloud_worker: %s: Failed; error: %s: %s',
worker_name, type(e).__name__, e)
_log.info('cloud_worker: %s: Stopped', worker_name)
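# Editor's sketch: a minimal cloud plugin satisfying the contract that
# cloud_worker expects above -- a ``read`` generator that yields record dicts
# and a ``done`` cleanup hook. This class is illustrative only (not part of
# CureIAM); the record payload is a hypothetical placeholder.
class _ExampleCloudPlugin:
    def read(self):
        # cloud_worker iterates this generator and merges the 'com'
        # bookkeeping fields into each record before queueing it.
        yield {'raw': {'resource_id': 'example-resource'}}

    def done(self):
        # Invoked once read() is exhausted.
        pass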
def processor_worker(audit_key, audit_version, plugin_key, plugin_config,
input_queue, output_queues):
"""Worker function for processor plugins.
This function instantiates a plugin object from the
``plugin_config`` dictionary. This function expects the plugin
object to implement an ``eval`` method that accepts a single record
as a parameter and yields one or more records, and a ``done`` method
to perform cleanup work in the end.
This function gets records from ``input_queue`` and passes each
record to the ``eval`` method of the plugin object. Then it puts
each record yielded by the ``eval`` method into each queue in
``output_queues``.
When there are no more records in the ``input_queue``, i.e., once
``None`` is found in the ``input_queue``, this function calls the
``done`` method of the plugin object to indicate that record
processing is over.
Arguments:
audit_key (str): Audit key name in configuration.
audit_version (str): Audit version string.
plugin_key (str): Plugin key name in configuration.
        plugin_config (dict): Processor plugin config dictionary.
input_queue (multiprocessing.Queue): Queue to read records from.
output_queues (list): List of :class:`multiprocessing.Queue`
objects to write records to.
"""
worker_name = audit_key + '_' + plugin_key
_log.info('processor_worker: %s: Started', worker_name)
try:
plugin = util.load_plugin(plugin_config)
except Exception as e:
_log.exception('processor_worker: %s: Failed; error: %s: %s',
worker_name, type(e).__name__, e)
_log.info('processor_worker: %s: Stopped', worker_name)
return
while True:
try:
record = input_queue.get()
if record is None:
_log.info('processor_worker: %s: Stopping', worker_name)
plugin.done()
break
for processor_record in plugin.eval(record):
processor_record['com'] = \
util.merge_dicts(processor_record.get('com', {}), {
'audit_key': audit_key,
'audit_version': audit_version,
'origin_key': plugin_key,
'origin_class': type(plugin).__name__,
'origin_worker': worker_name,
'origin_type': 'processor',
})
for q in output_queues:
q.put(processor_record)
except Exception as e:
_log.exception('processor_worker: %s: Failed; error: %s: %s',
worker_name, type(e).__name__, e)
_log.info('processor_worker: %s: Stopped', worker_name)
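# Editor's sketch: the matching processor-plugin shape -- ``eval`` takes one
# record and may yield zero or more derived records, and ``done`` runs when the
# input queue is drained. Illustrative only; the pass-through logic below is a
# hypothetical minimal example.
class _ExamplePassThroughProcessor:
    def eval(self, record):
        # A real processor would enrich or score the record here.
        yield record

    def done(self):
        pass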
def store_worker(audit_key, audit_version, plugin_key, plugin_config,
input_queue):
"""Worker function for store plugins.
This function instantiates a plugin object from the
``plugin_config`` dictionary. This function expects the plugin
object to implement a ``write`` method that accepts a single record
as a parameter and a ``done`` method to perform cleanup work in the
end.
This function gets records from ``input_queue`` and passes each
record to the ``write`` method of the plugin object.
When there are no more records in the ``input_queue``, i.e., once
``None`` is found in the ``input_queue``, this function calls the
``done`` method of the plugin object to indicate that record
processing is over.
Arguments:
audit_key (str): Audit key name in configuration.
audit_version (str): Audit version string.
plugin_key (str): Plugin key name in configuration.
plugin_config (dict): Store plugin config dictionary.
input_queue (multiprocessing.Queue): Queue to read records from.
"""
_write_worker(audit_key, audit_version, plugin_key, plugin_config,
input_queue, 'store')
def alert_worker(audit_key, audit_version, plugin_key, plugin_config,
input_queue):
"""Worker function for alert plugins.
This function behaves like :func:`CureIAM.workers.store_worker`.
See its documentation for details.
Arguments:
audit_key (str): Audit key name in configuration.
audit_version (str): Audit version string.
plugin_key (str): Plugin key name in configuration.
plugin_config (dict): Alert plugin config dictionary.
input_queue (multiprocessing.Queue): Queue to read records from.
"""
_write_worker(audit_key, audit_version, plugin_key, plugin_config,
input_queue, 'alert')
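# Editor's sketch: store and alert plugins share the write-style contract
# described above -- a ``write(record)`` consumer plus ``done``. Illustrative
# only; printing stands in for a real persistence or alerting backend.
class _ExamplePrintStore:
    def write(self, record):
        print(record)  # hypothetical sink; a real plugin would persist this

    def done(self):
        pass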
def _write_worker(audit_key, audit_version, plugin_key, plugin_config,
input_queue, worker_type):
"""Worker function for store and alert plugins.
Arguments:
        audit_key (str): Audit key name in configuration.
audit_version (str): Audit version string.
plugin_key (str): Plugin key name in configuration.
plugin_config (dict): Store or alert plugin config dictionary.
input_queue (multiprocessing.Queue): Queue to read records from.
worker_type (str): Either ``'store'`` or ``'alert'``.
"""
worker_name = audit_key + '_' + plugin_key
_log.info('%s_worker: %s: Started', worker_type, worker_name)
try:
plugin = util.load_plugin(plugin_config)
except Exception as e:
_log.exception('%s_worker: %s: Failed; error: %s: %s',
worker_type, worker_name, type(e).__name__, e)
_log.info('%s_worker: %s: Stopped', worker_type, worker_name)
return
while plugin is not None:
try:
record = input_queue.get()
if record is None:
_log.info('%s_worker: %s: Stopping',
worker_type, worker_name)
plugin.done()
break
record['com'] = util.merge_dicts(record.get('com', {}), {
'audit_key': audit_key,
'audit_version': audit_version,
'target_key': plugin_key,
'target_class': type(plugin).__name__,
'target_worker': worker_name,
'target_type': worker_type,
})
plugin.write(record)
except Exception as e:
_log.exception('%s_worker: %s: Failed; error: %s: %s',
worker_type, worker_name, type(e).__name__, e)
_log.info('%s_worker: %s: Stopped', worker_type, worker_name)
| avg_line_length: 37.431111 | max_line_length: 73 | alphanum_fraction: 0.619449 | …
hexsha: 90a0352b231b74d8151079f3d16e146f70cdd82a | size: 107 | ext: py | lang: Python | path: {{cookiecutter.project_slug}}/app/services/__init__.py | repo: youngsoul/cookiecutter-flask-creative-tim-starter | head: 6ad544a06b1ad4f35e39122a1198bcd84b6d064e | licenses: ["MIT"] | stars/issues/forks: null |
from flask import Blueprint
auth_bp = Blueprint('auth', __name__)
from app.services import auth_services
| avg_line_length: 17.833333 | max_line_length: 38 | alphanum_fraction: 0.803738 | …
hexsha: 90b97cb9aaf3b5d66c3e8d9c6d5777896c890107 | size: 104 | ext: py | lang: Python | path: pylie/__init__.py | repo: tussedrotten/pylie | head: df34b820b9d9273bc9c4287e559e5d5837faf794 | licenses: ["BSD-3-Clause"] | stars: 6 (2020-09-25 to 2022-03-11) | issues: 1 (2020-10-29 to 2021-02-09) | forks: 1 (2020-11-21) |
from pylie.so2 import SO2
from pylie.se2 import SE2
from pylie.so3 import SO3
from pylie.se3 import SE3
| avg_line_length: 20.8 | max_line_length: 25 | alphanum_fraction: 0.807692 | …
hexsha: 90f38db8467d98cd579e015af1b1eb6245a01aa4 | size: 28 | ext: py | lang: Python | path: serverless_crud/graphql/__init__.py | repo: epsylabs/python-serverless-crud | head: 71a79c4eb86e177a543e0af90c1dcf839996be34 | licenses: ["MIT"] | stars/issues/forks: null |
from .api import GraphQLAPI
| avg_line_length: 14 | max_line_length: 27 | alphanum_fraction: 0.821429 | …
hexsha: 29005b96f42f8f70444bcc5417179f7bcca05d85 | size: 26,071 | ext: py | lang: Python | path: main.py | repo: bilzkaist/APPLE | head: 0d887c9d1571c92915ad5b442d54f103e5a0726d | licenses: ["MIT"] | stars/issues/forks: null |
#*****************************************************************************
#
# APPLE: Accelerating Particle filter for Positioning and Localization Estimation Code.
# Written by Bilal Dastagir and Omer Tariq.
# Feb 1st, 2022
#
#******************************************************************************
from __future__ import absolute_import
import random
import math
import bisect
import time as tm
from apple import *
from draw import Maze
# Global variables
BETA = [0]
ALPHA = [1]
BRAVO = [2]
CHARLIE = [3]
PARTICLESNEW = 0
PARTICLESOLD = 1
"""
# Smaller maze
maze_data = ( ( 2, 0, 1, 0, 0 ),
( 0, 0, 0, 0, 1 ),
( 1, 1, 1, 0, 0 ),
( 1, 0, 0, 0, 0 ),
( 0, 0, 2, 0, 1 ))
"""
# 0 - empty square
# 1 - occupied square
# 2 - occupied square with a beacon at each corner, detectable by the robot
maze_data_Original = ( ( 1, 1, 0, 0, 2, 0, 0, 0, 0, 1 ),
( 1, 2, 0, 0, 1, 1, 0, 0, 0, 0 ),
( 0, 1, 1, 0, 0, 0, 0, 1, 0, 1 ),
( 0, 0, 0, 0, 1, 0, 0, 1, 1, 2 ),
( 1, 1, 0, 1, 1, 2, 0, 0, 1, 0 ),
( 1, 1, 1, 0, 1, 1, 1, 0, 2, 0 ),
( 2, 0, 0, 0, 0, 0, 0, 0, 0, 0 ),
( 1, 2, 0, 1, 1, 1, 1, 0, 0, 0 ),
( 0, 0, 0, 0, 1, 0, 0, 0, 1, 0 ),
( 0, 0, 1, 0, 0, 2, 1, 1, 1, 0 ))
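# Editor's note: with the 0/1/2 cell encoding documented above, beacon squares
# can be pulled straight out of the grid; e.g. for the 10x10 maze just defined:
beacon_cells = [(r, c) for r, row in enumerate(maze_data_Original)
                for c, cell in enumerate(row) if cell == 2]
# maze_data_Original contains 8 beacon squares.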
maze_data = ( ( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 2, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 2, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 2, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
)
maze_data_bk = ( ( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 0, 0, 2, 1, 1, 1, 1, 1, 1, 1, 0, 0, 2, 1, 1, 1, 1, 1, 1, 0, 0, 1, 2, 0, 1, 1, 1, 1, 1, 1, 1, 1, 2, 0, 0, 0, 2, 1, 0, 0, 1, 1, 1, 1, 1, 1, 2, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 2, 1, 1, 1, 1, 1, 1, 1, 2, 0, 1, 1, 1, 1, 1, 1, 1, 1, 2, 0, 0, 0, 2, 1, 1, 1, 1, 1, 1, 1, 1, 2, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
)
PARTICLE_COUNT = 2000 # Total number of particles
ROBOT_HAS_COMPASS = True # Does the robot know where north is? If so, it
# makes orientation a lot easier since it knows which direction it is facing.
# If not -- and that is really fascinating -- the particle filter can work
# out its heading too; it just takes more particles and more time. Try this
# with 3000+ particles: it obviously needs lots more hypotheses, as a particle
# now has to correctly match not only the position but also the heading.
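# As a sketch of that experiment (illustrative values, not part of the
# original demo), one would flip the flag and raise the particle budget:
#   ROBOT_HAS_COMPASS = False
#   PARTICLE_COUNT = 3000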
# ------------------------------------------------------------------------
# Some utility functions
def add_noise(level, *coords):
return [x + random.uniform(-level, level) for x in coords]
def add_little_noise(*coords):
return add_noise(0.02, *coords)
def add_some_noise(*coords):
return add_noise(0.1, *coords)
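# Illustrative behaviour of the helpers above (outputs vary, the jitter
# is random):
#   add_noise(0.1, 3.0, 4.0)  -> e.g. [3.07, 3.94]
#   add_little_noise(5.0)     -> e.g. [5.01]   (+/- 0.02 uniform jitter)
# Note they always return a list, even for a single coordinate.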
# This is just a Gaussian kernel I pulled out of my hat, to map
# values close to robbie's measurement => 1, and values further away => 0.
sigma2 = 0.9 ** 2
def w_gauss(a, b):
error = a - b
g = math.e ** -(error ** 2 / (2 * sigma2))
return g
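# Sketch of the kernel's shape (sigma = 0.9, so 2 * sigma2 = 1.62):
#   w_gauss(1.0, 1.0) -> 1.0                       (perfect match)
#   w_gauss(1.0, 2.0) -> e ** (-1 / 1.62) ~= 0.54  (1 unit of error)
#   w_gauss(1.0, 4.0) -> e ** (-9 / 1.62) ~= 0.004 (3 units of error)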
# ------------------------------------------------------------------------
def compute_mean_point(particles, world):
"""
Compute the mean for all particles that have a reasonably good weight.
This is not part of the particle filter algorithm but rather an
addition to show the "best belief" for current position.
"""
m_x, m_y, m_count = 0, 0, 0
for p in particles:
m_count += p.w
m_x += p.x * p.w
m_y += p.y * p.w
if m_count == 0:
return -1, -1, False
m_x /= m_count
m_y /= m_count
# Now compute how good that mean is -- check how many particles
# actually are in the immediate vicinity
m_count = 0
for p in particles:
if world.distance(p.x, p.y, m_x, m_y) < 1:
m_count += 1
return m_x, m_y, m_count > PARTICLE_COUNT * 0.95
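# Example return values: (12.3, 4.5, True) means the weighted mean is at
# (12.3, 4.5) and over 95% of the particles sit within distance 1 of it,
# i.e. the filter has essentially converged on one location.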
# ------------------------------------------------------------------------
class WeightedDistribution(object):
def __init__(self, state):
accum = 0.0
self.state = [p for p in state if p.w > 0]
self.distribution = []
for x in self.state:
accum += x.w
self.distribution.append(accum)
def pick(self):
try:
return self.state[bisect.bisect_left(self.distribution, random.uniform(0, 1))]
except IndexError:
# Happens when all particles are improbable w=0
return None
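# Minimal usage sketch (assumes weights were normalised to sum to 1, as
# runSimulation() does before resampling):
#   dist = WeightedDistribution(particles)
#   sample = [dist.pick() for _ in range(PARTICLE_COUNT)]
# pick() draws u ~ U(0, 1) and bisects the cumulative-weight table, so a
# particle is chosen with probability proportional to its weight.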
# ------------------------------------------------------------------------
class Particle(object):
def __init__(self, x, y, heading=None, w=1, noisy=False):
if heading is None:
heading = random.uniform(0, 360)
if noisy:
x, y, heading = add_some_noise(x, y, heading)
self.x = x
self.y = y
self.h = heading
self.w = w
def __repr__(self):
return "(%f, %f, w=%f)" % (self.x, self.y, self.w)
@property
def xy(self):
return self.x, self.y
@property
def xyh(self):
return self.x, self.y, self.h
@classmethod
def create_random(cls, count, maze):
return [cls(*maze.random_free_place()) for _ in range(0, count)]
def read_sensor(self, maze):
"""
Find distance to nearest beacon.
"""
return maze.distance_to_nearest_beacon(*self.xy)
def advance_by(self, speed, checker=None, noisy=False):
h = self.h
if noisy:
speed, h = add_little_noise(speed, h)
h += random.uniform(-3, 3) # needs more noise to disperse better
r = math.radians(h)
dx = math.sin(r) * speed
dy = math.cos(r) * speed
if checker is None or checker(self, dx, dy):
self.move_by(dx, dy)
return True
return False
def move_by(self, x, y):
self.x += x
self.y += y
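# Heading convention used by advance_by(): h is in degrees, measured
# clockwise from the positive y axis, since dx = sin(h) * speed and
# dy = cos(h) * speed. E.g. h = 0 advances along +y, h = 90 along +x.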
# ------------------------------------------------------------------------
class Robot(Particle):
speed = 0.2
def __init__(self, maze):
super(Robot, self).__init__(*maze.random_free_place(), heading=90)
self.chose_random_direction()
self.step_count = 0
def chose_random_direction(self):
heading = random.uniform(0, 360)
self.h = heading
def read_sensor(self, maze):
"""
        Poor robot, its sensors are noisy and pretty strange:
        it can only measure the distance to the nearest beacon(!)
        and is not very accurate at that, either!
"""
return add_little_noise(super(Robot, self).read_sensor(maze))[0]
def move(self, maze):
"""
Move the robot. Note that the movement is stochastic too.
"""
while True:
self.step_count += 1
if self.advance_by(self.speed, noisy=True,
checker=lambda r, dx, dy: maze.is_free(r.x+dx, r.y+dy)):
break
            # Bumped into something or too long in same direction,
            # choose a random new direction
self.chose_random_direction()
# ------------------------------------------------------------------------
def runSimulation(particlesOld, particleFlag):
print("Simulation Started : ")
world = Maze(maze_data)
world.draw()
totalTime = 0
start = tm.time()
# initial distribution assigns each particle an equal probability
    if particleFlag == PARTICLESNEW:
particles = Particle.create_random(PARTICLE_COUNT, world)
else:
particles = particlesOld
robbie = Robot(world)
i = 0
while True:
# pauseInitial= tm.time()
i = i + 1
# Read robbie's sensor
r_d = robbie.read_sensor(world)
# Update particle weight according to how good every particle matches
# robbie's sensor reading
for p in particles:
if world.is_free(*p.xy):
p_d = p.read_sensor(world)
p.w = w_gauss(r_d, p_d)
else:
p.w = 0
# ---------- Try to find current best estimate for display ----------
m_x, m_y, m_confident = compute_mean_point(particles, world)
# ---------- Show current state ----------
world.show_particles(particles)
world.show_mean(m_x, m_y, m_confident)
world.show_robot(robbie)
# ---------- Shuffle particles ----------
new_particles = []
# Normalise weights
nu = sum(p.w for p in particles)
if nu:
for p in particles:
p.w = p.w / nu
# create a weighted distribution, for fast picking
dist = WeightedDistribution(particles)
for _ in particles:
p = dist.pick()
            if p is None:  # No pick because all particles were improbable (w == 0)
new_particle = Particle.create_random(1, world)[0]
else:
new_particle = Particle(p.x, p.y,
heading=robbie.h if ROBOT_HAS_COMPASS else p.h,
noisy=True)
new_particles.append(new_particle)
particles = new_particles
# ---------- Move things ----------
old_heading = robbie.h
robbie.move(world)
d_h = robbie.h - old_heading
# Move particles according to my belief of movement (this may
# be different than the real movement, but it's all I got)
for p in particles:
p.h += d_h # in case robot changed heading, swirl particle heading too
p.advance_by(robbie.speed)
#pauseFinal = tm.time()
# executionTime = (pauseFinal - pauseInitial)
        if m_confident:
break
# else:
# print("Loop = ",i ," ->The Execution Time = ", executionTime)
stop = tm.time()
executionTimeFinal = (stop - start)
#print("Particles = ", particles)
print("The Total Execution Time = ", executionTimeFinal, " Seconds")
return particles
def run_beta():
print("Beta Program is Started........... !!!")
# Write code Here
particles = runSimulation(0,PARTICLESNEW)
#while True:
# particles = runSimulation(particles,PARTICLESNEW)
print("Beta Program is Ended Successfully !!!")
return BETA
def run_alpha():
print("Alpha Program is Started........... !!!")
# Write code Here
print("Alpha Program is Ended Successfully !!!")
return ALPHA
def run_bravo():
print("Bravo Program is Started........... !!!")
# Write code Here
print("Bravo Program is Ended Successfully !!!")
return BRAVO
def run_charlie():
print("Charlie Program is Started........... !!!")
# Write code Here
print("Charlie Program is Ended Successfully !!!")
return CHARLIE
def switch_mode(mode):
# Program Started
switcher = {
0: run_beta,
1: run_alpha,
2: run_bravo,
3: run_charlie
}
# Get the function from switcher dictionary
func = switcher.get(mode, lambda: "Invalid mode")
# Execute the function
print("Mode Selected : ",func())
# Program Ended
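# Usage sketch: switch_mode(0) dispatches to run_beta(); an unknown mode
# such as switch_mode(9) falls back to the default lambda and prints
# "Mode Selected :  Invalid mode".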
def run():
print("......................Main Program is Started........... !!!\n")
    # write code here
runMode = BETA
if (runMode == ALPHA):
run_alpha()
else:
run_beta()
print("\n......................Main Program is Ended Successfully !!!")
def print_hi(name):
# Use a breakpoint in the code line below to debug your script.
print(f'Hi, {name}') # Press Ctrl+F8 to toggle the breakpoint.
def print_bye(name):
# Use a breakpoint in the code line below to debug your script.
print(f'Bye, {name}') # Press Ctrl+F8 to toggle the breakpoint.
# Press the green button in the gutter to run the script.
if __name__ == '__main__':
print_hi(' Bilal Dastagir')
run()
print_bye('Bilal Dastagir')
| 59.387244
| 232
| 0.409114
| 6,018
| 26,071
| 1.746427
| 0.056497
| 0.744434
| 1.093245
| 1.431779
| 0.528639
| 0.49334
| 0.470314
| 0.456993
| 0.447669
| 0.447288
| 0
| 0.276437
| 0.340225
| 26,071
| 439
| 233
| 59.387244
| 0.334574
| 0.136205
| 0
| 0.291815
| 0
| 0
| 0.02681
| 0.002437
| 0
| 0
| 0
| 0
| 0
| 1
| 0.099644
| false
| 0.007117
| 0.024911
| 0.024911
| 0.213523
| 0.067616
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
29445f80be94360613cce6ceb2428aa2c26804f6
| 300
|
py
|
Python
|
unshortenit/modules/__init__.py
|
animeshxd/unshortenit
|
31e2772d5de0de5bda222e1b5437d114a1c483d6
|
[
"MIT"
] | 55
|
2015-04-13T20:44:35.000Z
|
2021-11-15T11:26:25.000Z
|
unshortenit/modules/__init__.py
|
DorHayun/URL-expander
|
8bd550d5371db4ae53a32a688ef7ca698e82c527
|
[
"MIT"
] | 24
|
2015-09-13T15:14:12.000Z
|
2021-11-04T14:59:30.000Z
|
unshortenit/modules/__init__.py
|
DorHayun/get-license-by-url
|
b5ace7c63ba4b9c906fac90245e6e7103abbeb69
|
[
"MIT"
] | 20
|
2015-02-19T07:10:16.000Z
|
2022-02-21T00:07:35.000Z
|
from .adfly import AdfLy # noqa
from .adfocus import AdFocus # noqa
from .shortest import ShorteSt # noqa
from .meta_refresh import MetaRefresh # noqa
from .linkbucks import LinkBucks # noqa
| 50
| 59
| 0.503333
| 26
| 300
| 5.769231
| 0.384615
| 0.213333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.463333
| 300
| 5
| 60
| 60
| 0.931677
| 0.08
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
297fcb11231082e1874c007e13921f9298a0e202
| 95
|
py
|
Python
|
test_availability.py
|
rtavenar/uea_datasets_availability
|
8222408402b1a10b8709d4fd9f2ccf5d78a7ad6e
|
[
"BSD-2-Clause"
] | 1
|
2019-06-04T15:44:48.000Z
|
2019-06-04T15:44:48.000Z
|
test_availability.py
|
rtavenar/uea_datasets_availability
|
8222408402b1a10b8709d4fd9f2ccf5d78a7ad6e
|
[
"BSD-2-Clause"
] | null | null | null |
test_availability.py
|
rtavenar/uea_datasets_availability
|
8222408402b1a10b8709d4fd9f2ccf5d78a7ad6e
|
[
"BSD-2-Clause"
] | 1
|
2020-01-09T07:32:59.000Z
|
2020-01-09T07:32:59.000Z
|
from tslearn.datasets import UCR_UEA_datasets
print(len(UCR_UEA_datasets().list_datasets()))
| 19
| 46
| 0.821053
| 14
| 95
| 5.214286
| 0.642857
| 0.164384
| 0.383562
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073684
| 95
| 4
| 47
| 23.75
| 0.829545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 6
|
461d3d5c6dc2d58d7fffefec4697a1ec1160ee15
| 509
|
py
|
Python
|
cpc/type/VoidType.py
|
U-Ar/Cpresto
|
f723458fb237c9e3e8bc8a6afdf7c81858a65363
|
[
"BSD-3-Clause"
] | 1
|
2021-05-09T07:10:19.000Z
|
2021-05-09T07:10:19.000Z
|
cpc/type/VoidType.py
|
U-Ar/Cpresto
|
f723458fb237c9e3e8bc8a6afdf7c81858a65363
|
[
"BSD-3-Clause"
] | null | null | null |
cpc/type/VoidType.py
|
U-Ar/Cpresto
|
f723458fb237c9e3e8bc8a6afdf7c81858a65363
|
[
"BSD-3-Clause"
] | null | null | null |
from .Type import Type
class VoidType(Type):
def __init__(self):
pass
def is_void(self):
return True
def size(self):
return 1
def equals(self,other):
return isinstance(other,VoidType)
def is_same_type(self, other):
return other.is_void()
def is_compatible(self,other):
return other.is_void()
def is_castable_to(self,other):
return other.is_void()
def to_string(self):
return "void"
| 19.576923
| 41
| 0.585462
| 65
| 509
| 4.369231
| 0.353846
| 0.070423
| 0.211268
| 0.211268
| 0.320423
| 0.320423
| 0.320423
| 0.21831
| 0
| 0
| 0
| 0.002907
| 0.324165
| 509
| 26
| 42
| 19.576923
| 0.822674
| 0
| 0
| 0.166667
| 0
| 0
| 0.007843
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.444444
| false
| 0.055556
| 0.055556
| 0.388889
| 0.944444
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 6
|
462187f5e25546d0c014f69efb383929ddae1ec3
| 168
|
py
|
Python
|
diff_cover/__init__.py
|
LuyaoHuang/diff-cover
|
64d7dbf4a4e5cd7d048087891df8be541e01be5a
|
[
"Apache-2.0"
] | 1
|
2019-09-24T02:30:39.000Z
|
2019-09-24T02:30:39.000Z
|
diff_cover/__init__.py
|
yafu-1/diff-cover
|
64d7dbf4a4e5cd7d048087891df8be541e01be5a
|
[
"Apache-2.0"
] | null | null | null |
diff_cover/__init__.py
|
yafu-1/diff-cover
|
64d7dbf4a4e5cd7d048087891df8be541e01be5a
|
[
"Apache-2.0"
] | 1
|
2019-09-06T06:44:09.000Z
|
2019-09-06T06:44:09.000Z
|
VERSION = '0.9.9'
DESCRIPTION = 'Automatically find diff lines that need test coverage.'
QUALITY_DESCRIPTION = 'Automatically find diff lines with quality violations.'
| 42
| 78
| 0.791667
| 22
| 168
| 6
| 0.681818
| 0.363636
| 0.424242
| 0.484848
| 0.560606
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020548
| 0.130952
| 168
| 3
| 79
| 56
| 0.883562
| 0
| 0
| 0
| 0
| 0
| 0.672619
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
463ccfc0f165839f81f22d3d1cdfdff080d3185c
| 129
|
py
|
Python
|
lib/JumpScale/lib/kvm/__init__.py
|
Jumpscale/jumpscale6_core
|
0502ddc1abab3c37ed982c142d21ea3955d471d3
|
[
"BSD-2-Clause"
] | 1
|
2015-10-26T10:38:13.000Z
|
2015-10-26T10:38:13.000Z
|
lib/JumpScale/lib/lxc/__init__.py
|
Jumpscale/jumpscale6_core
|
0502ddc1abab3c37ed982c142d21ea3955d471d3
|
[
"BSD-2-Clause"
] | null | null | null |
lib/JumpScale/lib/lxc/__init__.py
|
Jumpscale/jumpscale6_core
|
0502ddc1abab3c37ed982c142d21ea3955d471d3
|
[
"BSD-2-Clause"
] | null | null | null |
from JumpScale import j
j.base.loader.makeAvailable(j, 'system.platform.lxc')
from Lxc import Lxc
j.system.platform.lxc = Lxc()
| 21.5
| 53
| 0.767442
| 21
| 129
| 4.714286
| 0.47619
| 0.141414
| 0.30303
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108527
| 129
| 5
| 54
| 25.8
| 0.86087
| 0
| 0
| 0
| 0
| 0
| 0.148438
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
465fbffa75176a91eb68fe3edb85aa114e85befe
| 36
|
py
|
Python
|
lnwYod/__init__.py
|
attaporn/lnwYod
|
0fc01a8f081f6f1c0b26e94dc212f4ccab62ebaf
|
[
"MIT"
] | null | null | null |
lnwYod/__init__.py
|
attaporn/lnwYod
|
0fc01a8f081f6f1c0b26e94dc212f4ccab62ebaf
|
[
"MIT"
] | null | null | null |
lnwYod/__init__.py
|
attaporn/lnwYod
|
0fc01a8f081f6f1c0b26e94dc212f4ccab62ebaf
|
[
"MIT"
] | null | null | null |
from lnwYod.attaporn import Lnwyod
| 18
| 35
| 0.833333
| 5
| 36
| 6
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.138889
| 36
| 1
| 36
| 36
| 0.967742
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
468a62738c5d3c36ec7e9678c4e4311162cdfe94
| 101
|
py
|
Python
|
gym_connect_four/envs/__init__.py
|
andreigann/gym-connect-four
|
be93dc621aa5c57063a4aac8733ddb30dfb858c6
|
[
"MIT"
] | null | null | null |
gym_connect_four/envs/__init__.py
|
andreigann/gym-connect-four
|
be93dc621aa5c57063a4aac8733ddb30dfb858c6
|
[
"MIT"
] | 7
|
2020-09-25T20:35:50.000Z
|
2022-02-10T00:28:02.000Z
|
gym_connect_four/envs/__init__.py
|
andreigann/gym-connect-four
|
be93dc621aa5c57063a4aac8733ddb30dfb858c6
|
[
"MIT"
] | null | null | null |
from gym_connect_four.envs.connect_four_env import ConnectFourEnv, Player, RandomPlayer, SavedPlayer
| 50.5
| 100
| 0.881188
| 13
| 101
| 6.538462
| 0.846154
| 0.258824
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.069307
| 101
| 1
| 101
| 101
| 0.904255
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
d3bbcb8962e05ded4620c1f20f28bc4cd383d141
| 20,946
|
py
|
Python
|
tests/grid/test_datasets.py
|
gordonwatts/desktop-rucio
|
eb62fa767587685166796e996cf96107c57c5df0
|
[
"MIT"
] | null | null | null |
tests/grid/test_datasets.py
|
gordonwatts/desktop-rucio
|
eb62fa767587685166796e996cf96107c57c5df0
|
[
"MIT"
] | 7
|
2019-05-05T05:03:38.000Z
|
2019-06-30T13:56:37.000Z
|
tests/grid/test_datasets.py
|
gordonwatts/desktop-rucio
|
eb62fa767587685166796e996cf96107c57c5df0
|
[
"MIT"
] | null | null | null |
# Test out everything with datasets.
from src.grid.datasets import dataset_mgr, DatasetQueryStatus
from src.grid.rucio import RucioException
from tests.grid.utils_for_tests import simple_dataset, dummy_logger
from time import sleep
import datetime
import os
import pytest
@pytest.fixture()
def rucio_2file_dataset(simple_dataset):
class rucio_dummy:
def __init__(self, ds):
self._ds = ds
self.CountCalled = 0
self.CountCalledDL = 0
self._cache_mgr = None
def get_file_listing(self, ds_name, log_func = None):
self.CountCalled += 1
if ds_name == self._ds.Name:
return self._ds.FileList
return None
def download_files(self, ds_name, data_dir, log_func = None):
if self._cache_mgr is not None:
self._cache_mgr.add_ds(self._ds)
if log_func is not None:
log_func('downloading ' + ds_name)
self.CountCalledDL += 1
return rucio_dummy(simple_dataset)
@pytest.fixture()
def rucio_do_nothing():
class rucio_dummy:
def __init__(self):
self.CountCalled = 0
self.CountCalledDL = 0
def get_file_listing(self, ds_name, log_func = None):
self.CountCalled += 1
sleep(1)
return None
def download_files(self, ds_name, data_dir, log_func = None):
self.CountCalledDL += 1
sleep(1)
return rucio_dummy()
@pytest.fixture()
def rucio_2file_dataset_take_time(simple_dataset):
class rucio_dummy:
def __init__(self, ds):
self._ds = ds
self.CountCalled = 0
self.CountCalledDL = 0
self._cache_mgr = None
self.DLCalled = False
def get_file_listing(self, ds_name, log_func = None):
sleep(0.005)
self.CountCalled += 1
if ds_name == self._ds.Name:
return self._ds.FileList
return None
def download_files(self, ds_name, data_dir, log_func = None):
self.DLCalled = True
sleep(0.005)
if self._cache_mgr is not None:
self._cache_mgr.add_ds(self._ds)
self.CountCalledDL += 1
return rucio_dummy(simple_dataset)
@pytest.fixture()
def rucio_2file_dataset_with_fails(simple_dataset):
class rucio_dummy:
def __init__(self, ds):
self._ds = ds
self.CountCalled = 0
self.CountCalledDL = 0
self._cache_mgr = None
self.DLSleep = None
def get_file_listing(self, ds_name, log_func = None):
self.CountCalled += 1
if self.CountCalled < 5:
raise RucioException("Please Try again Due To Internet Being Out")
if ds_name == self._ds.Name:
return self._ds.FileList
return None
def download_files(self, ds_name, data_dir, log_func = None):
self.CountCalledDL += 1
if self.DLSleep is not None:
sleep(self.DLSleep)
if self.CountCalledDL < 5:
raise RucioException("Please try again due to internet being out")
if self._cache_mgr is not None:
self._cache_mgr.add_ds(self._ds)
return rucio_dummy(simple_dataset)
@pytest.fixture()
def rucio_2file_dataset_shows_up_later(simple_dataset):
class rucio_dummy:
def __init__(self, ds):
self._ds = ds
self.CountCalled = 0
def get_file_listing(self, ds_name, log_func = None):
self.CountCalled += 1
if self.CountCalled < 2:
return None
if ds_name == self._ds.Name:
return self._ds.FileList
return None
return rucio_dummy(simple_dataset)
@pytest.fixture()
def cache_empty():
    'Create an empty cache that records anything saved into it.'
class cache_good_dummy():
def __init__(self):
self._ds_list = {}
self._in_progress = []
self._in_download = []
self._downloaded_ds = {}
def get_download_directory(self):
return 'totally-bogus'
def add_ds(self, ds_info):
self._downloaded_ds[ds_info.Name] = ds_info
def get_listing(self, ds_name):
if ds_name in self._ds_list:
return self._ds_list[ds_name]
return None
def save_listing(self, ds_info):
self._ds_list[ds_info.Name] = ds_info
self._in_progress.remove(ds_info.Name)
def mark_query(self, ds_name):
self._in_progress.append(ds_name)
def query_in_progress(self, ds_name):
return ds_name in self._in_progress
def get_queries(self):
return self._in_progress
def get_ds_contents(self, ds_name):
if ds_name in self._downloaded_ds:
return [f.filename for f in self._downloaded_ds[ds_name].FileList]
return None
def mark_downloading(self, ds_name):
self._in_download.append(ds_name)
def download_in_progress(self, ds_name):
return ds_name in self._in_download
def get_downloading(self):
return self._in_download
def mark_download_done(self, ds_name):
self._in_download.remove(ds_name)
return cache_good_dummy()
@pytest.fixture()
def cache_with_ds(cache_empty, simple_dataset):
'Create a cache with a dataset called dataset1'
cache_empty.add_ds(simple_dataset)
return cache_empty
def test_dataset_query_queued(rucio_2file_dataset, cache_empty):
'Queue a dataset'
dm = dataset_mgr(cache_empty, rucio_mgr=rucio_2file_dataset)
status, files = dm.get_ds_contents('a_dataset')
# Should have queued the result since this was a new ds manager
assert status == DatasetQueryStatus.query_queued
assert None is files
def wait_some_time(check):
'Simple method to wait until check returns false. Will wait up to about a second so as not to delay things before throwing an assert.'
counter = 0
while check():
sleep(0.01)
counter += 1
assert counter < 100
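# Usage sketch, as the tests below do: block until the dummy rucio
# object has been queried at least once (or fail after ~1 second):
#   wait_some_time(lambda: rucio_2file_dataset.CountCalled == 0)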
def test_dataset_query_resolved(rucio_2file_dataset, cache_empty, simple_dataset):
'Queue and look for a dataset query result'
dm = dataset_mgr(cache_empty, rucio_mgr=rucio_2file_dataset)
_ = dm.get_ds_contents(simple_dataset.Name)
# Wait for the dataset query to run
wait_some_time(lambda: rucio_2file_dataset.CountCalled == 0)
# Now, make sure that we get back what we want here.
status, files = dm.get_ds_contents(simple_dataset.Name)
assert DatasetQueryStatus.results_valid == status
assert len(simple_dataset.FileList) == len(files)
# Make sure we didn't re-query for this.
    assert 1 == rucio_2file_dataset.CountCalled
_ = cache_empty.get_listing(simple_dataset.Name)
def test_query_for_bad_dataset(rucio_2file_dataset, cache_empty, simple_dataset):
'Ask for a bad dataset, and get back a null'
dm = dataset_mgr(cache_empty, rucio_mgr=rucio_2file_dataset)
_ = dm.get_ds_contents('bogus_ds')
wait_some_time(lambda: rucio_2file_dataset.CountCalled == 0)
# Make sure it comes back as bad.
status, files = dm.get_ds_contents('bogus_ds')
assert DatasetQueryStatus.does_not_exist == status
assert None is files
# Make sure that a timeout of an hour has been set on the dataset.
info = cache_empty.get_listing('bogus_ds')
assert datetime.datetime.now() == info.Created
def test_look_for_good_dataset_that_fails_a_bunch(rucio_2file_dataset_with_fails, cache_empty, simple_dataset):
'Queue and look for a good dataset that takes a few queries to show up with results'
dm = dataset_mgr(cache_empty, rucio_mgr=rucio_2file_dataset_with_fails, seconds_between_retries=0.01)
_ = dm.get_ds_contents(simple_dataset.Name)
# Wait for the dataset query to run
wait_some_time(lambda: rucio_2file_dataset_with_fails.CountCalled < 5)
# Now, make sure that we get back what we want and that the number of tries matches what we think
# it should have.
status, files = dm.get_ds_contents(simple_dataset.Name)
assert DatasetQueryStatus.results_valid == status
assert 5 == rucio_2file_dataset_with_fails.CountCalled
def test_two_queries_for_good_dataset(rucio_2file_dataset_take_time, cache_empty, simple_dataset):
'Make sure second query does not trigger second web download'
# Query twice, make sure we don't forget as we are doing this!
dm = dataset_mgr(cache_empty, rucio_mgr=rucio_2file_dataset_take_time)
_ = dm.get_ds_contents(simple_dataset.Name)
status, _ = dm.get_ds_contents(simple_dataset.Name)
assert DatasetQueryStatus.query_queued == status
# Wait for the dataset query to run
wait_some_time(lambda: rucio_2file_dataset_take_time.CountCalled == 0)
# Now, make sure that we get back what we want here.
status, _ = dm.get_ds_contents(simple_dataset.Name)
assert DatasetQueryStatus.results_valid == status
# Make sure we didn't re-query for this, and the expiration date is not set.
# Make sure to wait long enough for other timing stuff above to fall apart.
sleep(0.02)
assert 1 == rucio_2file_dataset_take_time.CountCalled
def test_dataset_appears(rucio_2file_dataset_shows_up_later, cache_empty, simple_dataset):
'After a bad dataset has aged, automatically queue a new query'
dm = dataset_mgr(cache_empty, rucio_mgr=rucio_2file_dataset_shows_up_later)
_ = dm.get_ds_contents(simple_dataset.Name)
wait_some_time(lambda: rucio_2file_dataset_shows_up_later.CountCalled == 0)
status, _ = dm.get_ds_contents(simple_dataset.Name)
assert DatasetQueryStatus.does_not_exist == status
# Query, but demand a quick re-check
status, _ = dm.get_ds_contents(simple_dataset.Name, maxAgeIfNotSeen=datetime.timedelta(seconds=0))
assert DatasetQueryStatus.query_queued == status
wait_some_time(lambda: rucio_2file_dataset_shows_up_later.CountCalled == 1)
status, _ = dm.get_ds_contents(simple_dataset.Name)
assert DatasetQueryStatus.results_valid == status
def test_dataset_always_missing_noretry(rucio_2file_dataset_shows_up_later, cache_empty, simple_dataset):
'Do not requery for the dataset'
dm = dataset_mgr(cache_empty, rucio_mgr=rucio_2file_dataset_shows_up_later)
_ = dm.get_ds_contents(simple_dataset.Name)
wait_some_time(lambda: rucio_2file_dataset_shows_up_later.CountCalled == 0)
status, _ = dm.get_ds_contents(simple_dataset.Name)
assert DatasetQueryStatus.does_not_exist == status
# Query, but demand a quick re-check
status, _ = dm.get_ds_contents(simple_dataset.Name, maxAgeIfNotSeen=None)
assert DatasetQueryStatus.does_not_exist == status
assert 1 == rucio_2file_dataset_shows_up_later.CountCalled
def test_dataset_always_missing_longretry(rucio_2file_dataset_shows_up_later, cache_empty, simple_dataset):
'Do not requery for the dataset'
dm = dataset_mgr(cache_empty, rucio_mgr=rucio_2file_dataset_shows_up_later)
_ = dm.get_ds_contents(simple_dataset.Name)
wait_some_time(lambda: rucio_2file_dataset_shows_up_later.CountCalled == 0)
status, _ = dm.get_ds_contents(simple_dataset.Name)
assert DatasetQueryStatus.does_not_exist == status
# Query, but demand a quick re-check
status, _ = dm.get_ds_contents(simple_dataset.Name, maxAgeIfNotSeen=datetime.timedelta(seconds=1000))
assert DatasetQueryStatus.does_not_exist == status
assert 1 == rucio_2file_dataset_shows_up_later.CountCalled
def test_good_dataset_retry(rucio_2file_dataset, cache_empty, simple_dataset):
'Do a requery for the dataset'
dm = dataset_mgr(cache_empty, rucio_mgr=rucio_2file_dataset)
_ = dm.get_ds_contents(simple_dataset.Name)
wait_some_time(lambda: rucio_2file_dataset.CountCalled == 0)
status, _ = dm.get_ds_contents(simple_dataset.Name)
assert DatasetQueryStatus.results_valid == status
# Query, but demand a quick re-check
status, _ = dm.get_ds_contents(simple_dataset.Name, maxAge=datetime.timedelta(seconds=0))
assert DatasetQueryStatus.query_queued == status
wait_some_time(lambda: rucio_2file_dataset.CountCalled == 1)
status, _ = dm.get_ds_contents(simple_dataset.Name)
assert DatasetQueryStatus.results_valid == status
assert 2 == rucio_2file_dataset.CountCalled
def test_good_dataset_longretry(rucio_2file_dataset, cache_empty, simple_dataset):
'Do not requery for the dataset'
dm = dataset_mgr(cache_empty, rucio_mgr=rucio_2file_dataset)
_ = dm.get_ds_contents(simple_dataset.Name)
wait_some_time(lambda: rucio_2file_dataset.CountCalled == 0)
status, _ = dm.get_ds_contents(simple_dataset.Name)
assert DatasetQueryStatus.results_valid == status
# Query, but demand a quick re-check
status, _ = dm.get_ds_contents(simple_dataset.Name, maxAge=datetime.timedelta(seconds=1000))
assert DatasetQueryStatus.results_valid == status
assert 1 == rucio_2file_dataset.CountCalled
def test_good_dataset_maxAgeIfNotSeenNoEffect(rucio_2file_dataset, cache_empty, simple_dataset):
'Do not requery for the dataset'
dm = dataset_mgr(cache_empty, rucio_mgr=rucio_2file_dataset)
_ = dm.get_ds_contents(simple_dataset.Name)
wait_some_time(lambda: rucio_2file_dataset.CountCalled == 0)
status, _ = dm.get_ds_contents(simple_dataset.Name)
assert DatasetQueryStatus.results_valid == status
# Query, but demand a quick re-check
status, _ = dm.get_ds_contents(simple_dataset.Name, maxAgeIfNotSeen=datetime.timedelta(seconds=0))
assert DatasetQueryStatus.results_valid == status
assert 1 == rucio_2file_dataset.CountCalled
def test_good_dataset_content_restart(rucio_do_nothing, rucio_2file_dataset, cache_empty, simple_dataset):
dm0 = dataset_mgr(cache_empty, rucio_mgr=rucio_do_nothing)
_ = dm0.get_ds_contents(simple_dataset.Name)
wait_some_time(lambda: rucio_do_nothing.CountCalled == 0)
# Start up a new one that should pick up the ball where it was dropped.
dm = dataset_mgr(cache_empty, rucio_mgr=rucio_2file_dataset)
wait_some_time(lambda: rucio_2file_dataset.CountCalled == 0)
# Now, make sure that we get back what we want here.
status, _ = dm.get_ds_contents(simple_dataset.Name)
assert DatasetQueryStatus.results_valid == status
def test_dataset_download_query(rucio_2file_dataset, cache_empty, simple_dataset):
'Queue a download and look for it to show up'
dm = dataset_mgr(cache_empty, rucio_mgr=rucio_2file_dataset)
status, files = dm.download_ds(simple_dataset.Name)
assert files is None
assert DatasetQueryStatus.query_queued == status
def test_dataset_download_good(rucio_2file_dataset, cache_empty, simple_dataset):
'Queue a download and look for it to show up'
rucio_2file_dataset._cache_mgr = cache_empty
dm = dataset_mgr(cache_empty, rucio_mgr=rucio_2file_dataset)
_ = dm.download_ds(simple_dataset.Name)
# Wait for the dataset query to run
wait_some_time(lambda: rucio_2file_dataset.CountCalledDL == 0)
# Now, make sure that we get back what we want here.
status, files = dm.download_ds(simple_dataset.Name)
assert DatasetQueryStatus.results_valid == status
assert len(simple_dataset.FileList) == len(files)
# Make sure we didn't re-query for this.
assert 1 == rucio_2file_dataset.CountCalledDL
def test_dataset_download_good_nodownload(rucio_2file_dataset, cache_empty, simple_dataset):
'Queue a download and look for it to show up'
rucio_2file_dataset._cache_mgr = cache_empty
dm = dataset_mgr(cache_empty, rucio_mgr=rucio_2file_dataset)
status, files = dm.download_ds(simple_dataset.Name, do_download=False)
assert None is files
assert DatasetQueryStatus.does_not_exist == status
def test_dataset_download_no_exist(rucio_2file_dataset, cache_empty):
'Queue a download and look for it to show up'
rucio_2file_dataset._cache_mgr = cache_empty
dm = dataset_mgr(cache_empty, rucio_mgr=rucio_2file_dataset)
_ = dm.download_ds('bogus')
# Wait for the dataset query to run
wait_some_time(lambda: rucio_2file_dataset.CountCalledDL == 0)
# Now, make sure that we get back what we want here.
status, files = dm.download_ds('bogus')
assert DatasetQueryStatus.does_not_exist == status
assert None is files
def test_dataset_download_with_failures(rucio_2file_dataset_with_fails, cache_empty, simple_dataset):
'Queue a download, it fails, but then gets there'
rucio_2file_dataset_with_fails._cache_mgr = cache_empty
dm = dataset_mgr(cache_empty, rucio_mgr=rucio_2file_dataset_with_fails, seconds_between_retries=0.01)
_ = dm.download_ds(simple_dataset.Name)
# Wait for the dataset query to run
wait_some_time(lambda: rucio_2file_dataset_with_fails.CountCalledDL < 5)
# Now, make sure that we get back what we want here.
status, files = dm.download_ds(simple_dataset.Name)
assert DatasetQueryStatus.results_valid == status
assert len(simple_dataset.FileList) == len(files)
    # 4 failures before success (the first four download calls raise),
    # so make sure we retry the right number of times
assert 5 == rucio_2file_dataset_with_fails.CountCalledDL
def test_dataset_download_good_ask_twice(rucio_2file_dataset_take_time, cache_empty, simple_dataset):
'Be impatient about asking how things are going'
rucio_2file_dataset_take_time._cache_mgr = cache_empty
dm = dataset_mgr(cache_empty, rucio_mgr=rucio_2file_dataset_take_time)
_ = dm.download_ds(simple_dataset.Name)
# Ask again.
status, _ = dm.download_ds(simple_dataset.Name)
assert DatasetQueryStatus.query_queued == status
# Wait for the dataset query to run
wait_some_time(lambda: rucio_2file_dataset_take_time.CountCalledDL == 0)
# Now, make sure that we get back what we want here.
status, _ = dm.download_ds(simple_dataset.Name)
assert DatasetQueryStatus.results_valid == status
# Make sure we didn't re-query for this.
sleep(0.02)
assert 1 == rucio_2file_dataset_take_time.CountCalledDL
def test_dataset_download_logs(rucio_2file_dataset, cache_empty, simple_dataset):
'Queue a download and look for it to show up'
rucio_2file_dataset._cache_mgr = cache_empty
lg = dummy_logger()
dm = dataset_mgr(cache_empty, rucio_mgr=rucio_2file_dataset, logger=lg)
_ = dm.download_ds(simple_dataset.Name)
# Wait for the dataset query to run
wait_some_time(lambda: rucio_2file_dataset.CountCalledDL == 0)
# Make sure some lines were sent to the logger
assert len(lg.lines) > 0
def test_dataset_download_restart(rucio_do_nothing, rucio_2file_dataset, cache_empty, simple_dataset):
rucio_2file_dataset._cache_mgr = cache_empty
# Trigger the download on one.
dm0 = dataset_mgr(cache_empty, rucio_mgr=rucio_do_nothing)
_ = dm0.download_ds(simple_dataset.Name)
wait_some_time(lambda: rucio_do_nothing.CountCalledDL == 0)
# Next, create a second one with the same cache.
dm = dataset_mgr(cache_empty, rucio_mgr=rucio_2file_dataset)
wait_some_time(lambda: rucio_2file_dataset.CountCalledDL == 0)
status, _ = dm.download_ds(simple_dataset.Name)
assert DatasetQueryStatus.results_valid == status
def test_dataset_download_restart_and_marked(rucio_do_nothing, rucio_2file_dataset_with_fails, cache_empty, simple_dataset):
rucio_2file_dataset_with_fails._cache_mgr = cache_empty
rucio_2file_dataset_with_fails.DLSleep = 0.05
# Trigger the download on one.
dm0 = dataset_mgr(cache_empty, rucio_mgr=rucio_do_nothing)
_ = dm0.download_ds(simple_dataset.Name)
wait_some_time(lambda: rucio_do_nothing.CountCalledDL == 0)
# Next, create a second one with the same cache.
dm = dataset_mgr(cache_empty, rucio_mgr=rucio_2file_dataset_with_fails, seconds_between_retries=0.05)
wait_some_time(lambda: rucio_2file_dataset_with_fails.CountCalledDL < 2)
assert cache_empty.download_in_progress(simple_dataset.Name)
# wait_some_time(lambda: rucio_2file_dataset_with_fails.CountCalledDL == 0)
# status, _ = dm.download_ds(simple_dataset.Name)
# assert DatasetQueryStatus.results_valid == status
def test_dataset_no_prefix(rucio_do_nothing, cache_with_ds):
dm = dataset_mgr(cache_with_ds, rucio_mgr=rucio_do_nothing)
status, files = dm.download_ds('dataset1')
assert DatasetQueryStatus.results_valid == status
assert 2 == len(files)
assert 'f1.root' == files[0]
def test_dataset_with_prefix(rucio_do_nothing, cache_with_ds):
dm = dataset_mgr(cache_with_ds, rucio_mgr=rucio_do_nothing)
status, files = dm.download_ds('dataset1', prefix='file://cache/')
assert DatasetQueryStatus.results_valid == status
assert 2 == len(files)
assert 'file://cache/f1.root' == files[0]
| 41.232283
| 138
| 0.726201
| 2,957
| 20,946
| 4.798783
| 0.085898
| 0.061311
| 0.104228
| 0.03277
| 0.808739
| 0.783369
| 0.748908
| 0.732347
| 0.718393
| 0.694715
| 0
| 0.012413
| 0.200038
| 20,946
| 507
| 139
| 41.313609
| 0.834447
| 0.142987
| 0
| 0.600551
| 0
| 0.002755
| 0.063375
| 0
| 0
| 0
| 0
| 0
| 0.162534
| 1
| 0.15978
| false
| 0
| 0.019284
| 0.013774
| 0.267218
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
311fa7dc6c935021d952f8838d795c9a322840e7
| 603
|
py
|
Python
|
tests/test_utils.py
|
jovanzers/quart
|
63b49d509f6461f79b43f956831d7c9831067a0b
|
[
"MIT"
] | 1,085
|
2017-11-12T10:56:50.000Z
|
2022-03-31T15:14:26.000Z
|
tests/test_utils.py
|
jovanzers/quart
|
63b49d509f6461f79b43f956831d7c9831067a0b
|
[
"MIT"
] | 139
|
2017-12-04T09:22:48.000Z
|
2022-03-26T08:34:07.000Z
|
tests/test_utils.py
|
jovanzers/quart
|
63b49d509f6461f79b43f956831d7c9831067a0b
|
[
"MIT"
] | 92
|
2017-12-08T09:55:02.000Z
|
2022-03-29T20:45:44.000Z
|
from __future__ import annotations
from functools import partial
from werkzeug.datastructures import Headers
from quart.utils import decode_headers, encode_headers, is_coroutine_function
def test_is_coroutine_function() -> None:
async def async_func() -> None:
pass
assert is_coroutine_function(async_func)
assert is_coroutine_function(partial(async_func))
def test_encode_headers() -> None:
assert encode_headers(Headers({"Foo": "Bar"})) == [(b"foo", b"Bar")]
def test_decode_headers() -> None:
assert decode_headers([(b"foo", b"Bar")]) == Headers({"Foo": "Bar"})
| 25.125
| 77
| 0.728027
| 79
| 603
| 5.253165
| 0.329114
| 0.106024
| 0.183133
| 0.120482
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.149254
| 603
| 23
| 78
| 26.217391
| 0.808967
| 0
| 0
| 0
| 0
| 0
| 0.039801
| 0
| 0
| 0
| 0
| 0
| 0.307692
| 1
| 0.230769
| true
| 0.076923
| 0.307692
| 0
| 0.538462
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
3155e91a2fc2d1674120541e308e20596b92f18c
| 1,856
|
py
|
Python
|
payrolls/migrations/0006_auto_20190815_1359.py
|
aaronmatei/Payroll-System-Django
|
5605e6a152c56cd171c43dfd07ff0a99eea65b4d
|
[
"bzip2-1.0.6"
] | null | null | null |
payrolls/migrations/0006_auto_20190815_1359.py
|
aaronmatei/Payroll-System-Django
|
5605e6a152c56cd171c43dfd07ff0a99eea65b4d
|
[
"bzip2-1.0.6"
] | null | null | null |
payrolls/migrations/0006_auto_20190815_1359.py
|
aaronmatei/Payroll-System-Django
|
5605e6a152c56cd171c43dfd07ff0a99eea65b4d
|
[
"bzip2-1.0.6"
] | 2
|
2020-09-08T07:12:34.000Z
|
2021-11-19T08:25:22.000Z
|
# Generated by Django 2.2.4 on 2019-08-15 10:59
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('payrolls', '0005_auto_20190815_0231'),
]
operations = [
migrations.AlterField(
model_name='employee',
name='basic_salary',
field=models.TextField(),
),
migrations.AlterField(
model_name='employee',
name='gender',
field=models.TextField(),
),
migrations.AlterField(
model_name='payrolls',
name='PAYE',
field=models.TextField(),
),
migrations.AlterField(
model_name='payrolls',
name='gross_salary',
field=models.TextField(),
),
migrations.AlterField(
model_name='payrolls',
name='net_salary',
field=models.TextField(),
),
migrations.AlterField(
model_name='payrolls',
name='nhif_deductions',
field=models.TextField(),
),
migrations.AlterField(
model_name='payrolls',
name='nssf_deductions',
field=models.TextField(),
),
migrations.AlterField(
model_name='payrolls',
name='other_deductions',
field=models.TextField(),
),
migrations.AlterField(
model_name='payrolls',
name='overtime',
field=models.TextField(),
),
migrations.AlterField(
model_name='payrolls',
name='salary_advance',
field=models.TextField(),
),
migrations.AlterField(
model_name='payrolls',
name='taxable_income',
field=models.TextField(),
),
]
| 26.898551
| 48
| 0.518319
| 147
| 1,856
| 6.394558
| 0.306122
| 0.234043
| 0.292553
| 0.339362
| 0.743617
| 0.743617
| 0.687234
| 0.687234
| 0.628723
| 0.369149
| 0
| 0.026383
| 0.366918
| 1,856
| 68
| 49
| 27.294118
| 0.773617
| 0.024246
| 0
| 0.709677
| 1
| 0
| 0.135434
| 0.012714
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.016129
| 0
| 0.064516
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
315d957ec9142eb894f6d4abf882eb9509e0d60c
| 35,118
|
py
|
Python
|
test/test_admin.py
|
Noschvie/roundup
|
996377ed0d12c69a01c7565dc5f47d6fb0ccaf19
|
[
"MIT"
] | null | null | null |
test/test_admin.py
|
Noschvie/roundup
|
996377ed0d12c69a01c7565dc5f47d6fb0ccaf19
|
[
"MIT"
] | null | null | null |
test/test_admin.py
|
Noschvie/roundup
|
996377ed0d12c69a01c7565dc5f47d6fb0ccaf19
|
[
"MIT"
] | null | null | null |
#
# Copyright (C) 2007 Stefan Seefeld
# All rights reserved.
# For license terms see the file COPYING.txt.
#
from __future__ import print_function
import unittest, os, shutil, errno, sys, difflib, cgi, re
from roundup.admin import AdminTool
from . import db_test_base
from .test_mysql import skip_mysql
from .test_postgresql import skip_postgresql
#from roundup import instance
# https://stackoverflow.com/questions/4219717/how-to-assert-output-with-nosetest-unittest-in-python
# lightly modified
from contextlib import contextmanager
_py3 = sys.version_info[0] > 2
if _py3:
from io import StringIO # py3
else:
from StringIO import StringIO # py2
@contextmanager
def captured_output():
new_out, new_err = StringIO(), StringIO()
old_out, old_err = sys.stdout, sys.stderr
try:
sys.stdout, sys.stderr = new_out, new_err
yield sys.stdout, sys.stderr
finally:
sys.stdout, sys.stderr = old_out, old_err
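# Usage sketch, mirroring the tests below:
#   with captured_output() as (out, err):
#       print("hello")
#   assert out.getvalue().strip() == "hello"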
def normalize_file(filename, skiplines=()):
    # https://stackoverflow.com/questions/4710067/using-python-for-deleting-a-specific-line-in-a-file
    # Rewrite the file in place, dropping every line that contains any of
    # the given skip markers.
    with open(filename, "r+") as f:
        d = f.readlines()
        f.seek(0)
        for i in d:
            if not any(skip in i for skip in skiplines):
                f.write(i)
        f.truncate()
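# Usage sketch: strip the autogenerated timestamp line from two generated
# config files so filecmp can compare them, as testGenconfigUpdate does:
#   normalize_file('config.ini', ['# Autogenerated at'])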
class AdminTest(object):
backend = None
def setUp(self):
self.dirname = '_test_admin'
def tearDown(self):
try:
shutil.rmtree(self.dirname)
except OSError as error:
if error.errno not in (errno.ENOENT, errno.ESRCH): raise
def install_init(self, type="classic",
settings="mail_domain=example.com," +
"mail_host=localhost," +
"tracker_web=http://test/," +
"rdbms_name=rounduptest," +
"rdbms_user=rounduptest," +
"rdbms_password=rounduptest," +
"rdbms_template=template0"
):
        ''' Install the tracker, supplying the required config.ini settings.
'''
admin=AdminTool()
admin.force = True # force it to nuke existing tracker
# Run under context manager to suppress output of help text.
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'install',
type, self.backend, settings ]
ret = admin.main()
self.assertEqual(ret, 0)
        # nuke any existing database (mysql/postgresql)
# possible method in case admin.force doesn't work
#tracker = instance.open(self.dirname)
#if tracker.exists():
# tracker.nuke()
# initialize tracker with initial_data.py. Put password
# on cli so I don't have to respond to prompting.
sys.argv=['main', '-i', self.dirname, 'initialise', 'admin']
admin.force = True # force it to nuke existing database
ret = admin.main()
self.assertEqual(ret, 0)
def testGet(self):
        ''' Note: these tests will fail if you run them under pdb.
        The context managers capture the pdb prompts, and this screws
        up the stdout strings with (pdb) prefixed to the line.
        '''
import sys
self.install_init()
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'create', 'issue',
'title="foo bar"', 'assignedto=admin' ]
ret = self.admin.main()
out = out.getvalue().strip()
print(out)
self.assertEqual(out, '1')
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'create', 'issue',
'title="bar foo bar"', 'assignedto=anonymous',
'superseder=1']
ret = self.admin.main()
self.assertEqual(ret, 0)
out = out.getvalue().strip()
print(out)
self.assertEqual(out, '2')
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'get', 'assignedto',
'issue2' ]
ret = self.admin.main()
self.assertEqual(ret, 0)
out = out.getvalue().strip()
err = err.getvalue().strip()
self.assertEqual(out, '2')
self.assertEqual(len(err), 0)
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'get', 'superseder',
'issue2' ]
ret = self.admin.main()
self.assertEqual(ret, 0)
out = out.getvalue().strip()
err = err.getvalue().strip()
self.assertEqual(out, "['1']")
self.assertEqual(len(err), 0)
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'get', 'title', 'issue1']
ret = self.admin.main()
self.assertEqual(ret, 0)
out = out.getvalue().strip()
err = err.getvalue().strip()
self.assertEqual(out, '"foo bar"') ## why is capture inserting "??
self.assertEqual(len(err), 0)
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'get', 'tile', 'issue1']
ret = self.admin.main()
expected_err = 'Error: no such issue property "tile"'
self.assertEqual(ret, 1)
out = out.getvalue().strip()
err = err.getvalue().strip()
self.assertEqual(out.index(expected_err), 0)
self.assertEqual(len(err), 0)
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'get', 'title', 'issue']
ret = self.admin.main()
expected_err = 'Error: "issue" not a node designator'
self.assertEqual(ret, 1)
out = out.getvalue().strip()
err = err.getvalue().strip()
self.assertEqual(out.index(expected_err), 0)
self.assertEqual(len(err), 0)
def testInit(self):
import sys
self.admin=AdminTool()
sys.argv=['main', '-i', self.dirname, 'install', 'classic', self.backend]
ret = self.admin.main()
print(ret)
self.assertTrue(ret == 0)
self.assertTrue(os.path.isfile(self.dirname + "/config.ini"))
self.assertTrue(os.path.isfile(self.dirname + "/schema.py"))
def testInitWithConfig_ini(self):
import sys
from roundup.configuration import CoreConfig
self.admin=AdminTool()
sys.argv=['main', '-i', self.dirname, 'install', 'classic', self.backend]
# create a config_ini.ini file in classic template
templates=self.admin.listTemplates()
config_ini_content = "[mail]\n# comment\ndebug = SendMail.LOG\n"
config_ini_path = templates['classic']['path'] + '/config_ini.ini'
config_ini_file = open(config_ini_path, "w")
config_ini_file.write(config_ini_content)
config_ini_file.close()
try:
ret = self.admin.main()
finally:
try:
# ignore file not found
os.remove(config_ini_path)
except OSError as e: # FileNotFound exception under py3
if e.errno == 2:
pass
else:
raise
print(ret)
self.assertTrue(ret == 0)
self.assertTrue(os.path.isfile(self.dirname + "/config.ini"))
self.assertTrue(os.path.isfile(self.dirname + "/schema.py"))
config=CoreConfig(self.dirname)
self.assertEqual(config['MAIL_DEBUG'], self.dirname + "/SendMail.LOG")
def testFind(self):
        ''' Note: these tests will fail if you run them under pdb.
        The context managers capture the pdb prompts, and this screws
        up the stdout strings with (pdb) prefixed to the line.
        '''
import sys
self.admin=AdminTool()
self.install_init()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'create', 'issue',
'title="foo bar"', 'assignedto=admin' ]
ret = self.admin.main()
out = out.getvalue().strip()
print(out)
self.assertEqual(out, '1')
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'create', 'issue',
'title="bar foo bar"', 'assignedto=anonymous' ]
ret = self.admin.main()
out = out.getvalue().strip()
print(out)
self.assertEqual(out, '2')
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'find', 'issue',
'assignedto=1']
ret = self.admin.main()
out = out.getvalue().strip()
print(out)
self.assertEqual(out, "['1']")
# Reopen the db closed by previous filter call
self.admin=AdminTool()
with captured_output() as (out, err):
''' 1,2 should return all entries that have assignedto
either admin or anonymous
'''
sys.argv=['main', '-i', self.dirname, 'find', 'issue',
'assignedto=1,2']
ret = self.admin.main()
out = out.getvalue().strip()
print(out)
# out can be "['2', '1']" or "['1', '2']"
# so eval to real list so Equal can do a list compare
self.assertEqual(sorted(eval(out)), ['1', '2'])
# Reopen the db closed by previous filter call
self.admin=AdminTool()
with captured_output() as (out, err):
''' 1,2 should return all entries that have assignedto
either admin or anonymous
'''
sys.argv=['main', '-i', self.dirname, 'find', 'issue',
'assignedto=admin,anonymous']
ret = self.admin.main()
out = out.getvalue().strip()
print(out)
# out can be "['2', '1']" or "['1', '2']"
# so eval to real list so Equal can do a list compare
self.assertEqual(sorted(eval(out)), ['1', '2'])
def testGenconfigUpdate(self):
        ''' Note: these tests will fail if you run them under pdb.
        The context managers capture the pdb prompts, and this screws
        up the stdout strings with (pdb) prefixed to the line.
        '''
import sys, filecmp
self.admin=AdminTool()
self.install_init()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'genconfig']
ret = self.admin.main()
out = out.getvalue().strip()
print(out)
expected = "Not enough arguments supplied"
self.assertTrue(expected in out)
# Reopen the db closed by previous call
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'genconfig',
self.dirname + "/config2.ini"]
ret = self.admin.main()
out = out.getvalue().strip()
print(out)
# FIXME get better successful test later.
expected = ""
self.assertTrue(expected in out)
self.assertTrue(os.path.isfile(self.dirname + "/config2.ini"))
# Files aren't the same. Lines need to be removed.
# like user, web, backend etc. Genconfig generates a file
# to be customized.
#self.assertTrue(filecmp.cmp(self.dirname + "/config2.ini",
# self.dirname + "/config.ini"))
# Reopen the db closed by previous call
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'update',
self.dirname + "/foo2.ini"]
ret = self.admin.main()
out = out.getvalue().strip()
print(out)
# FIXME get better successful test later.
expected = ""
self.assertTrue(expected in out)
self.assertTrue(os.path.isfile(self.dirname + "/foo2.ini"))
# Autogenerated date header is different. Remove it
# so filecmp passes.
normalize_file(self.dirname + "/foo2.ini",
[ '# Autogenerated at' ])
normalize_file(self.dirname + "/config.ini",
[ '# Autogenerated at' ])
self.assertTrue(filecmp.cmp(self.dirname + "/config.ini",
self.dirname + "/foo2.ini"))
def testCliParse(self):
        ''' Note: these tests will fail if you run them under pdb.
        The context managers capture the pdb prompts, and this screws
        up the stdout strings with (pdb) prefixed to the line.
        '''
import sys
self.admin=AdminTool()
self.install_init()
# test partial command lookup fin -> calls find
with captured_output() as (out, err):
            ''' "fin" should resolve to the find command via partial
            command matching.
            '''
sys.argv=['main', '-i', self.dirname, 'fin', 'issue',
'assignedto=1']
ret = self.admin.main()
out = out.getvalue().strip()
print(out)
expected="[ '1' ]"
self.assertTrue(expected, out)
# Reopen the db closed by previous call
self.admin=AdminTool()
# test multiple matches
with captured_output() as (out, err):
            ''' "f" is ambiguous (it matches both filter and find), so
            expect an error listing both commands.
            '''
sys.argv=['main', '-i', self.dirname, 'f', 'issue',
'assignedto']
ret = self.admin.main()
out = out.getvalue().strip()
print(out)
expected='Multiple commands match "f": filter, find'
self.assertEqual(expected, out)
# Reopen the db closed by previous call
self.admin=AdminTool()
# test broken command lookup xyzzy is not a valid command
with captured_output() as (out, err):
            ''' "xyzzy" is not a valid command, so expect an
            unknown-command error.
            '''
sys.argv=['main', '-i', self.dirname, 'xyzzy', 'issue',
'assignedto']
ret = self.admin.main()
out = out.getvalue().strip()
print(out)
expected=('Unknown command "xyzzy" '
'("help commands" for a list)')
self.assertEqual(expected, out)
# Reopen the db closed by previous call
self.admin=AdminTool()
# test for keyword=value check
with captured_output() as (out, err):
''' assignedto is not a valid property=value, so
report error.
'''
sys.argv=['main', '-i', self.dirname, 'find', 'issue',
'assignedto']
ret = self.admin.main()
out = out.getvalue().strip()
print(out)
expected='Error: argument "assignedto" not propname=value'
self.assertTrue(expected in out)
def testFilter(self):
        ''' Note: these tests will fail if you run them under pdb.
        The context managers capture the pdb prompts, and this screws
        up the stdout strings with (pdb) prefixed to the line.
        '''
import sys
self.admin=AdminTool()
self.install_init()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'create', 'issue',
'title="foo bar"', 'assignedto=admin' ]
ret = self.admin.main()
out = out.getvalue().strip()
print(out)
self.assertEqual(out, '1')
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'create', 'issue',
'title="bar foo bar"', 'assignedto=anonymous' ]
ret = self.admin.main()
out = out.getvalue().strip()
print(out)
self.assertEqual(out, '2')
# Reopen the db closed by previous filter call
# test string - one result, one value, substring
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'filter', 'user',
'username=admin']
ret = self.admin.main()
out = out.getvalue().strip()
print(out)
self.assertEqual(out, "['1']")
# Reopen the db closed by previous filter call
# test string - two results, two values, substring
self.admin=AdminTool()
with captured_output() as (out, err):
''' a,n should return all entries that have an a and n
so admin or anonymous
'''
sys.argv=['main', '-i', self.dirname, 'filter', 'user',
'username=a,n']
ret = self.admin.main()
out = out.getvalue().strip()
print(out)
# out can be "['2', '1']" or "['1', '2']"
# so eval to real list so Equal can do a list compare
self.assertEqual(sorted(eval(out)), ['1', '2'])
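# Editor's note: eval() on captured output works here, but
# ast.literal_eval is the safer stdlib way to parse a printed list
# literal; a sketch of the equivalent check:
#
#   import ast
#   ids = ast.literal_eval(out)          # "['2', '1']" -> ['2', '1']
#   self.assertEqual(sorted(ids), ['1', '2'])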
# Reopen the db closed by previous filter call
# test string - one result, two values, substring
self.admin=AdminTool()
with captured_output() as (out, err):
''' a,y should return all entries that have an a and y
so anonymous
'''
sys.argv=['main', '-i', self.dirname, 'filter', 'user',
'username=a,y']
ret = self.admin.main()
out = out.getvalue().strip()
print(out)
self.assertEqual(out, "['2']")
# Reopen the db closed by previous filter call
# test string - no results
self.admin=AdminTool()
with captured_output() as (out, err):
''' returns the empty set: no username contains both
"admin" and "anonymous" as substrings.
'''
sys.argv=['main', '-i', self.dirname, 'filter', 'user',
'username=admin,anonymous']
ret = self.admin.main()
out = out.getvalue().strip()
print(out)
self.assertEqual(out, "[]")
# Reopen the db closed by previous filter call
# test link using ids
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'filter', 'issue',
'assignedto=1,2']
ret = self.admin.main()
out = out.getvalue().strip()
print(out)
self.assertEqual(sorted(eval(out)), ['1', '2'])
# Reopen the db closed by previous filter call
# test link using names
self.admin=AdminTool()
with captured_output() as (out, err):
''' matching assignedto by the user names admin or
anonymous returns both issues.
'''
sys.argv=['main', '-i', self.dirname, 'filter', 'issue',
'assignedto=admin,anonymous']
ret = self.admin.main()
out = out.getvalue().strip()
print(out)
self.assertEqual(sorted(eval(out)), ['1', '2'])
# Reopen the db closed by previous filter call
#
# case: transitive property valid match
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'filter', 'issue',
'assignedto.roles=Anonymous']
ret = self.admin.main()
out = out.getvalue().strip()
print(out)
self.assertEqual(out, "['2']")
# Reopen the db closed by previous filter call
# case: transitive property, invalid property name
self.admin=AdminTool()
with captured_output() as (out, err):
''' user has no property "badprop", so an error is
reported for assignedto.badprop.
'''
sys.argv=['main', '-i', self.dirname, 'filter', 'issue',
'assignedto.badprop=Admin']
ret = self.admin.main()
out = out.getvalue().strip()
expected='Error: Class user has no property badprop in assignedto.badprop.'
print(out[0:len(expected)])
self.assertEqual(expected, out[0:len(expected)])
# Reopen the db closed by previous filter call
#
# case: transitive property, no matching value
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname,
'filter', 'issue',
'assignedto.username=NoNAme']
ret = self.admin.main()
out = out.getvalue().strip()
print("me: " + out)
print(err.getvalue().strip())
self.assertEqual(out, "[]")
# Reopen the db closed by previous filter call
#
# case: transitive property, no match, with -c (comma) output
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, '-c',
'filter', 'issue',
'assignedto.username=NoNAme']
ret = self.admin.main()
out = out.getvalue().strip()
print("me: " + out)
print(err.getvalue().strip())
self.assertEqual(out, "")
# Reopen the db closed by previous filter call
#
# case: transitive property valid match, -c gives comma-separated ids
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, '-c',
'filter', 'issue',
'assignedto.username=A']
ret = self.admin.main()
out = out.getvalue().strip()
print("me: " + out)
print(err.getvalue().strip())
self.assertEqual(out, "1,2")
# Reopen the db closed by previous filter call
#
# case: transitive property valid match, -s gives space-separated ids
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, '-s',
'filter', 'issue',
'assignedto.username=A']
ret = self.admin.main()
out = out.getvalue().strip()
print("me: " + out)
print(err.getvalue().strip())
self.assertEqual(out, "1 2")
# Reopen the db closed by previous filter call
#
# case: transitive property valid match, -S ':' -d gives colon-separated designators
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, '-S', ':',
'-d', 'filter', 'issue',
'assignedto.username=A']
ret = self.admin.main()
out = out.getvalue().strip()
print("me: " + out)
print(err.getvalue().strip())
self.assertEqual(out, "issue1:issue2")
def disabletestHelpInitopts(self):
''' Note: these tests will fail if run under pdb. The context
managers capture the pdb prompts, which corrupts the captured
stdout strings by prefixing lines with "(Pdb)".
'''
import sys
self.install_init()
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'help', 'initopts']
ret = self.admin.main()
out = out.getvalue().strip()
expected = [
'Templates: minimal, jinja2, classic, responsive, devel',
'Back ends: anydbm, sqlite'
]
print(out)
self.assertTrue(expected[0] in out)
self.assertTrue("Back ends:" in out)
def testSet(self):
''' Note: these tests will fail if run under pdb. The context
managers capture the pdb prompts, which corrupts the captured
stdout strings by prefixing lines with "(Pdb)".
'''
import sys
self.install_init()
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'create', 'issue',
'title="foo bar"', 'assignedto=admin' ]
ret = self.admin.main()
out = out.getvalue().strip()
print(out)
self.assertEqual(out, '1')
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'create', 'issue',
'title="bar foo bar"', 'assignedto=anonymous' ]
ret = self.admin.main()
out = out.getvalue().strip()
print(out)
self.assertEqual(out, '2')
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'set', 'issue2', 'title="new title"']
ret = self.admin.main()
out = out.getvalue().strip()
err = err.getvalue().strip()
self.assertEqual(len(out), 0)
self.assertEqual(len(err), 0)
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'set', 'issue2',
'tile="new title"']
ret = self.admin.main()
expected_err = "Error: 'tile' is not a property of issue"
out = out.getvalue().strip()
err = err.getvalue().strip()
self.assertEqual(out.index(expected_err), 0)
self.assertEqual(len(err), 0)
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'set', 'issue2']
ret = self.admin.main()
expected_err = "Error: Not enough arguments supplied"
out = out.getvalue().strip()
err = err.getvalue().strip()
self.assertEqual(out.index(expected_err), 0)
self.assertEqual(len(err), 0)
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'set',
'issue2,issue1,issue', "status=1" ]
ret = self.admin.main()
expected_err = 'Error: "issue" not a node designator'
out = out.getvalue().strip()
err = err.getvalue().strip()
self.assertEqual(out.index(expected_err), 0)
self.assertEqual(len(err), 0)
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'set',
'issue2,issue1,user2', "status=1" ]
ret = self.admin.main()
expected_err = "Error: 'status' is not a property of user"
out = out.getvalue().strip()
err = err.getvalue().strip()
print(out)
print(expected_err)
print(err)
self.assertEqual(out.index(expected_err), 0)
self.assertEqual(len(err), 0)
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'set',
'issue2,issue1,issue1000', "status=1" ]
ret = self.admin.main()
expected_err = 'Error: no such issue 1000'
out = out.getvalue().strip()
err = err.getvalue().strip()
self.assertEqual(out.index(expected_err), 0)
self.assertEqual(len(err), 0)
def testSetOnClass(self):
''' Note: these tests will fail if run under pdb. The context
managers capture the pdb prompts, which corrupts the captured
stdout strings by prefixing lines with "(Pdb)".
'''
import sys
self.install_init()
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'create', 'issue',
'title="foo bar"', 'assignedto=admin' ]
ret = self.admin.main()
out = out.getvalue().strip()
print(out)
self.assertEqual(out, '1')
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'create', 'issue',
'title="bar foo bar"', 'assignedto=anonymous' ]
ret = self.admin.main()
out = out.getvalue().strip()
print(out)
self.assertEqual(out, '2')
# Run this in a separate test method: when combined with other
# tests it can cause a database timeout/resource-unavailable
# error for anydbm. Not sure why.
# Set assignedto=2 for all issues
## verify that issue 1 and 2 are assigned to user1 and user2
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'table', 'issue',
'assignedto']
ret = self.admin.main()
expected = "Assignedto\n1 \n2"
out = out.getvalue().strip()
err = err.getvalue().strip()
self.assertEqual(out, expected)
self.assertEqual(len(err), 0)
self.admin=AdminTool()
# do the set
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'set', 'issue',
'assignedto=2']
ret = self.admin.main()
expected_err = ""
out = out.getvalue().strip()
err = err.getvalue().strip()
self.assertEqual(len(out), 0)
self.assertEqual(len(err), 0)
## verify that issue 1 and 2 are assigned to user2 and user2
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'table', 'issue',
'assignedto']
ret = self.admin.main()
expected = "Assignedto\n2 \n2"
out = out.getvalue().strip()
err = err.getvalue().strip()
self.assertEqual(out, expected)
self.assertEqual(len(err), 0)
def testSpecification(self):
''' Note: these tests will fail if run under pdb. The context
managers capture the pdb prompts, which corrupts the captured
stdout strings by prefixing lines with "(Pdb)".
'''
import sys
self.install_init()
self.admin=AdminTool()
spec= [ 'username: <roundup.hyperdb.String> (key property)',
'alternate_addresses: <roundup.hyperdb.String>',
'realname: <roundup.hyperdb.String>',
'roles: <roundup.hyperdb.String>',
'organisation: <roundup.hyperdb.String>',
'queries: <roundup.hyperdb.Multilink to "query">',
'phone: <roundup.hyperdb.String>',
'address: <roundup.hyperdb.String>',
'timezone: <roundup.hyperdb.String>',
'password: <roundup.hyperdb.Password>',
]
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'specification', 'user']
ret = self.admin.main()
outlist = out.getvalue().strip().split("\n")
print(outlist)
self.assertEqual(sorted(outlist), sorted(spec))
def testTable(self):
''' Note: these tests will fail if run under pdb. The context
managers capture the pdb prompts, which corrupts the captured
stdout strings by prefixing lines with "(Pdb)".
'''
import sys
self.install_init()
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'table' ]
ret = self.admin.main()
expected = 'Error: Not enough arguments supplied'
out = out.getvalue().strip()
print(out)
print(expected)
self.assertTrue(expected in out)
####
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'table',
'id,realname,username' ]
ret = self.admin.main()
expected = 'Error: no such class "id,realname,username"'
out = out.getvalue().strip()
print(out)
print(expected)
self.assertTrue(expected in out)
####
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'table', 'user',
'id,realname,username:4:3' ]
ret = self.admin.main()
expected = 'Error: "username:4:3" not name:width'
out = out.getvalue().strip()
print(out)
print(expected)
self.assertTrue(expected in out)
####
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'table', 'user',
'id,realname,title:4' ]
ret = self.admin.main()
expected = 'Error: user has no property "title"'
out = out.getvalue().strip()
print(out)
print(expected)
self.assertTrue(expected in out)
####
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'table', 'user',
'id,realname,username:' ]
ret = self.admin.main()
# note: whitespace matters - lines 1 and 2 have trailing spaces
expected = """Id Realname Username
1 None admin
2 None anonymou"""
out = out.getvalue().strip()
print(out)
print(expected)
self.assertEqual(out, expected)
####
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'table', 'user',
'id,realname,username' ]
ret = self.admin.main()
# note: whitespace matters - lines 1 and 2 have trailing spaces
expected = """Id Realname Username
1 None admin
2 None anonymous"""
out = out.getvalue().strip()
print(out)
print(expected)
self.assertEqual(out, expected)
####
self.admin=AdminTool()
with captured_output() as (out, err):
sys.argv=['main', '-i', self.dirname, 'table', 'user',
'id:4,realname:2,username:3' ]
ret = self.admin.main()
# note: whitespace matters - lines 1 and 2 have trailing spaces
expected = """Id Realname Username
1 No adm
2 No ano"""
out = out.getvalue().strip()
print(out)
print(expected)
self.assertEqual(out, expected)
class anydbmAdminTest(AdminTest, unittest.TestCase):
backend = 'anydbm'
@skip_mysql
class mysqlAdminTest(AdminTest, unittest.TestCase):
backend = 'mysql'
class sqliteAdminTest(AdminTest, unittest.TestCase):
backend = 'sqlite'
@skip_postgresql
class postgresqlAdminTest(AdminTest, unittest.TestCase):
backend = 'postgresql'
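# Editor's note on the pattern above: each concrete class mixes the
# shared AdminTest suite into unittest.TestCase and pins `backend`,
# so the full suite runs once per database backend. A minimal sketch
# of the same idiom, with hypothetical names:
#
#   class BackendMixin:
#       backend = None                  # set by concrete subclasses
#       def test_open(self):
#           db = open_db(self.backend)  # hypothetical helper
#           self.assertTrue(db)
#
#   class sqliteTests(BackendMixin, unittest.TestCase):
#       backend = 'sqlite'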
| 34.062076
| 99
| 0.541517
| 3,974
| 35,118
| 4.749371
| 0.102919
| 0.056268
| 0.034969
| 0.038148
| 0.766398
| 0.757232
| 0.743722
| 0.735085
| 0.727562
| 0.706686
| 0
| 0.0079
| 0.325958
| 35,118
| 1,030
| 100
| 34.095146
| 0.789447
| 0.151375
| 0
| 0.722802
| 0
| 0
| 0.137186
| 0.025548
| 0
| 0
| 0
| 0.000971
| 0.132638
| 1
| 0.025335
| false
| 0.004471
| 0.032787
| 0
| 0.073025
| 0.089419
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
318e34e7ded1ca0487f28bb6a7ec910ca4078c63
| 23
|
py
|
Python
|
lib/backbone/HSMNet/__init__.py
|
wx-b/SMD-Nets
|
8d2ef162539e5b0becc7a43a89b54431b06d3f34
|
[
"MIT"
] | 91
|
2021-04-01T07:50:41.000Z
|
2022-03-30T02:50:42.000Z
|
lib/backbone/HSMNet/__init__.py
|
wx-b/SMD-Nets
|
8d2ef162539e5b0becc7a43a89b54431b06d3f34
|
[
"MIT"
] | 12
|
2021-05-12T07:56:33.000Z
|
2022-03-25T19:56:38.000Z
|
lib/backbone/HSMNet/__init__.py
|
wx-b/SMD-Nets
|
8d2ef162539e5b0becc7a43a89b54431b06d3f34
|
[
"MIT"
] | 25
|
2021-04-09T02:43:59.000Z
|
2022-03-26T05:27:24.000Z
|
from .hsm import HSMNet
| 23
| 23
| 0.826087
| 4
| 23
| 4.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 23
| 1
| 23
| 23
| 0.95
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
31ee1f11debd840efaa8dea8c5f55f7269829510
| 1,522
|
py
|
Python
|
tests/wavefunction/orbitals/test_norm.py
|
NLESC-JCER/QMCTorch
|
c56472cd3e9cc59f2e01a880e674b7270d2cdc2b
|
[
"Apache-2.0"
] | 16
|
2020-06-26T17:43:38.000Z
|
2022-03-03T14:16:02.000Z
|
tests/wavefunction/orbitals/test_norm.py
|
NLESC-JCER/QMCTorch
|
c56472cd3e9cc59f2e01a880e674b7270d2cdc2b
|
[
"Apache-2.0"
] | 57
|
2020-05-01T07:13:49.000Z
|
2021-07-13T19:51:55.000Z
|
tests/wavefunction/orbitals/test_norm.py
|
NLESC-JCER/QMCTorch
|
c56472cd3e9cc59f2e01a880e674b7270d2cdc2b
|
[
"Apache-2.0"
] | 3
|
2020-07-30T09:56:04.000Z
|
2021-08-12T02:55:45.000Z
|
import unittest
import torch
import numpy as np
from types import SimpleNamespace
from qmctorch.wavefunction.orbitals.norm_orbital import atomic_orbital_norm
class TestAtomicOrbitalNorm(unittest.TestCase):
def test_sph_sto(self):
basis = SimpleNamespace()
basis.harmonics_type = 'sph'
basis.radial_type = 'sto'
basis.bas_n = torch.as_tensor([0, 1, 2])
basis.bas_exp = torch.rand(3)
atomic_orbital_norm(basis)
def test_sph_gto(self):
basis = SimpleNamespace()
basis.harmonics_type = 'sph'
basis.radial_type = 'gto'
basis.bas_n = torch.as_tensor([0, 1, 2])
basis.bas_exp = torch.rand(3)
atomic_orbital_norm(basis)
def test_cart_sto(self):
basis = SimpleNamespace()
basis.harmonics_type = 'cart'
basis.radial_type = 'sto'
basis.bas_exp = np.random.rand(4)
basis.bas_kx = np.array([0, 0, 0, 1])
basis.bas_ky = np.array([0, 1, 0, 0])
basis.bas_kz = np.array([0, 0, 1, 0])
basis.bas_kr = np.array([0, 0, 0, 0])
atomic_orbital_norm(basis)
def test_cart_gto(self):
basis = SimpleNamespace()
basis.harmonics_type = 'cart'
basis.radial_type = 'gto'
basis.bas_exp = np.random.rand(4)
basis.bas_kx = np.array([0, 0, 0, 1])
basis.bas_ky = np.array([0, 1, 0, 0])
basis.bas_kz = np.array([0, 0, 1, 0])
basis.bas_kr = np.array([0, 0, 0, 0])
atomic_orbital_norm(basis)
| 27.672727
| 75
| 0.607753
| 218
| 1,522
| 4.045872
| 0.206422
| 0.126984
| 0.072562
| 0.061224
| 0.783447
| 0.783447
| 0.769841
| 0.739229
| 0.739229
| 0.739229
| 0
| 0.037634
| 0.266754
| 1,522
| 54
| 76
| 28.185185
| 0.752688
| 0
| 0
| 0.75
| 0
| 0
| 0.017083
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.125
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
73097c71c96bb6728829c676c0843dc0d4f2d197
| 38
|
py
|
Python
|
web_parsers/manifest/__init__.py
|
invanalabs/web-parser
|
dca9c6354317ec7187f46fd270092372b39f63f8
|
[
"Apache-2.0"
] | 1
|
2019-10-06T23:11:32.000Z
|
2019-10-06T23:11:32.000Z
|
web_parsers/manifest/__init__.py
|
crawlerflow/extraction-engine
|
dca9c6354317ec7187f46fd270092372b39f63f8
|
[
"Apache-2.0"
] | 2
|
2020-03-11T09:33:03.000Z
|
2020-03-18T21:12:28.000Z
|
web_parsers/manifest/__init__.py
|
crawlerflow/extraction-engine
|
dca9c6354317ec7187f46fd270092372b39f63f8
|
[
"Apache-2.0"
] | null | null | null |
from .common import WebParserManifest
| 19
| 37
| 0.868421
| 4
| 38
| 8.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 38
| 1
| 38
| 38
| 0.970588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
7349558881490f705f52794a0dbba240d22dea70
| 830
|
py
|
Python
|
1-mouth01/day07/exe098.py
|
gary-gggggg/gary
|
d8ba30ea4bc2b662a2d6a87d247f813e5680d63e
|
[
"Apache-2.0"
] | 4
|
2021-02-01T10:28:11.000Z
|
2021-02-01T10:34:40.000Z
|
1-mouth01/day07/exe098.py
|
gary-gggggg/gary
|
d8ba30ea4bc2b662a2d6a87d247f813e5680d63e
|
[
"Apache-2.0"
] | null | null | null |
1-mouth01/day07/exe098.py
|
gary-gggggg/gary
|
d8ba30ea4bc2b662a2d6a87d247f813e5680d63e
|
[
"Apache-2.0"
] | null | null | null |
"""商品字典dict_commodity_infos = {1001: {"name": "屠龙刀",\
"price": 10000}, 1002: {"name": "倚天剑", "price": 10000}, \
1003: {"name": "金箍棒", "price": 52100}, \
1004: {"name": "口罩", "price": 20}, \
1005: {"name": "酒精", "price": 30}, }# \
订单列表list_orders = [{"cid": 1001, "count": 1}, \
{"cid": 1002, "count": 3}, {"cid": 1005, "count": 2},]\
1.打印所有商品信息, 格式:商品编号 xx,商品名称 xx,商品单价 xx. /
2. 打印所有订单中的信息, 格式:商品编号 xx,购买数量 xx"""
# 商品字典
dict_commodity_infos = {1001: {"name": "屠龙刀",\
"price": 10000}, 1002: {"name": "倚天剑", "price": 10000}, \
1003: {"name": "金箍棒", "price": 52100}, \
1004: {"name": "口罩", "price": 20}, \
1005: {"name": "酒精", "price": 30}, }# \
# 订单列表
list_orders = [{"cid": 1001, "count": 1}, \
{"cid": 1002, "count": 3}, {"cid": 1005, "count": 2},]
for i1 in list_orders:
print(f"商品编号{i1['cid']},购买数量{i1['count']}")
| 39.52381
| 58
| 0.539759
| 115
| 830
| 3.834783
| 0.373913
| 0.090703
| 0.081633
| 0.099773
| 0.707483
| 0.707483
| 0.707483
| 0.707483
| 0.707483
| 0.707483
| 0
| 0.160969
| 0.154217
| 830
| 20
| 59
| 41.5
| 0.467236
| 0.495181
| 0
| 0
| 0
| 0
| 0.289673
| 0.083123
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.111111
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
7dfc744c0aadfe4abe00325f9c9ac8169db552c1
| 67
|
py
|
Python
|
tests/test_runner.py
|
uuosio/python-contract-demos
|
7d56ba371f2115b0ab895fca3e71092c2523f25d
|
[
"MIT"
] | 2
|
2020-12-08T13:15:06.000Z
|
2020-12-29T10:06:44.000Z
|
tests/test_runner.py
|
uuosio/python-contract-demos
|
7d56ba371f2115b0ab895fca3e71092c2523f25d
|
[
"MIT"
] | null | null | null |
tests/test_runner.py
|
uuosio/python-contract-demos
|
7d56ba371f2115b0ab895fca3e71092c2523f25d
|
[
"MIT"
] | null | null | null |
import sys
from uuoskit import test_helper
test_helper.run_test()
| 13.4
| 31
| 0.835821
| 11
| 67
| 4.818182
| 0.636364
| 0.377358
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119403
| 67
| 4
| 32
| 16.75
| 0.898305
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b439d703d73e42b8eab71264418cc69432e989ba
| 245
|
py
|
Python
|
TelegramBot/client/exceptions.py
|
kolyasalubov/ProjectShop
|
41a62d27276542bf65aab51663220ffe88858024
|
[
"MIT"
] | 3
|
2021-08-30T09:15:52.000Z
|
2022-03-24T14:40:11.000Z
|
TelegramBot/client/exceptions.py
|
kolyasalubov/ProjectShop
|
41a62d27276542bf65aab51663220ffe88858024
|
[
"MIT"
] | 150
|
2021-08-30T12:37:17.000Z
|
2021-11-17T07:09:19.000Z
|
TelegramBot/client/exceptions.py
|
kolyasalubov/ProjectShop
|
41a62d27276542bf65aab51663220ffe88858024
|
[
"MIT"
] | null | null | null |
from requests import HTTPError
class ClientError(HTTPError):
"""
Error for all responses with status code 4xx.
"""
pass
class ServerError(HTTPError):
"""
Error for all responses with status code 5xx.
"""
pass
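# A hypothetical helper (editor's sketch, not part of this module)
# showing how these classes can be raised from a requests.Response:
def raise_for_status_class(response):
    """Raise ClientError for 4xx and ServerError for 5xx responses."""
    if 400 <= response.status_code < 500:
        raise ClientError("%s client error" % response.status_code,
                          response=response)
    if 500 <= response.status_code < 600:
        raise ServerError("%s server error" % response.status_code,
                          response=response)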
| 13.611111
| 49
| 0.644898
| 28
| 245
| 5.642857
| 0.642857
| 0.177215
| 0.21519
| 0.253165
| 0.493671
| 0.493671
| 0.493671
| 0
| 0
| 0
| 0
| 0.011299
| 0.277551
| 245
| 17
| 50
| 14.411765
| 0.881356
| 0.363265
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.4
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 6
|
b44b3aac8aa2f9124ce6a901fe66673b02a1258c
| 41,564
|
py
|
Python
|
pybind/slxos/v16r_1_00b/interface/ethernet/ipv6/interface_ospfv3_conf/authentication/ipsec_auth_key_config/__init__.py
|
shivharis/pybind
|
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
|
[
"Apache-2.0"
] | null | null | null |
pybind/slxos/v16r_1_00b/interface/ethernet/ipv6/interface_ospfv3_conf/authentication/ipsec_auth_key_config/__init__.py
|
shivharis/pybind
|
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
|
[
"Apache-2.0"
] | null | null | null |
pybind/slxos/v16r_1_00b/interface/ethernet/ipv6/interface_ospfv3_conf/authentication/ipsec_auth_key_config/__init__.py
|
shivharis/pybind
|
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
|
[
"Apache-2.0"
] | 1
|
2021-11-05T22:15:42.000Z
|
2021-11-05T22:15:42.000Z
|
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
class ipsec_auth_key_config(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module brocade-interface - based on the path /interface/ethernet/ipv6/interface-ospfv3-conf/authentication/ipsec-auth-key-config. Each member element of
the container is represented as a class variable - with a specific
YANG type.
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__spi','__ah','__ah_no_encrypt','__ah_key','__esp','__esp_no_encrypt','__esp_key','__esp_auth','__esp_auth_no_encrypt','__esp_auth_key',)
_yang_name = 'ipsec-auth-key-config'
_rest_name = ''
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
path_helper_ = kwargs.pop("path_helper", None)
if path_helper_ is False:
self._path_helper = False
elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
self._path_helper = path_helper_
elif hasattr(self, "_parent"):
path_helper_ = getattr(self._parent, "_path_helper", False)
self._path_helper = path_helper_
else:
self._path_helper = False
extmethods = kwargs.pop("extmethods", None)
if extmethods is False:
self._extmethods = False
elif extmethods is not None and isinstance(extmethods, dict):
self._extmethods = extmethods
elif hasattr(self, "_parent"):
extmethods = getattr(self._parent, "_extmethods", None)
self._extmethods = extmethods
else:
self._extmethods = False
self.__ah_no_encrypt = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="ah-no-encrypt", rest_name="no-encrypt", parent=self, choice=(u'ch-algorithm', u'ca-ah-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Do not encrypt the key ', u'cli-optional-in-sequence': None, u'hidden': u'full', u'alt-name': u'no-encrypt', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='empty', is_config=True)
self.__esp = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'NULL': {'value': 1}},), is_leaf=True, yang_name="esp", rest_name="esp", parent=self, choice=(u'ch-algorithm', u'ca-esp-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Specify Encapsulating Security Payload (ESP)', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='algorithm-type-esp', is_config=True)
self.__ah = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'hmac-sha1': {'value': 2}, u'hmac-md5': {'value': 1}},), is_leaf=True, yang_name="ah", rest_name="ah", parent=self, choice=(u'ch-algorithm', u'ca-ah-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Specify the authentication algorithm to use', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='algorithm-type-ah', is_config=True)
self.__esp_key = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'0..80']}), is_leaf=True, yang_name="esp-key", rest_name="esp-key", parent=self, choice=(u'ch-algorithm', u'ca-esp-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Hexadecimal key string for ESP', u'hidden': u'full', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='ipsec-authentication-hexkey-string', is_config=True)
self.__esp_auth_key = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'0..80']}), is_leaf=True, yang_name="esp-auth-key", rest_name="key", parent=self, choice=(u'ch-algorithm', u'ca-esp-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Hexadecimal key string for authentication algorithm', u'alt-name': u'key', u'suppress-echo': u'true'}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='ipsec-authentication-hexkey-string', is_config=True)
self.__spi = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'512..4294967295']}), is_leaf=True, yang_name="spi", rest_name="spi", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Security Parameter Index', u'cli-full-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='spi-value-type', is_config=True)
self.__esp_no_encrypt = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="esp-no-encrypt", rest_name="esp-no-encrypt", parent=self, choice=(u'ch-algorithm', u'ca-esp-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Do not encrypt the key ', u'cli-optional-in-sequence': None, u'hidden': u'full', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='empty', is_config=True)
self.__esp_auth = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'hmac-sha1': {'value': 2}, u'hmac-md5': {'value': 1}},), is_leaf=True, yang_name="esp-auth", rest_name="esp-auth", parent=self, choice=(u'ch-algorithm', u'ca-esp-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Use Authentication Algorithm', u'cli-drop-node-name': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='algorithm-type-ah', is_config=True)
self.__esp_auth_no_encrypt = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="esp-auth-no-encrypt", rest_name="no-encrypt", parent=self, choice=(u'ch-algorithm', u'ca-esp-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Do not encrypt the key ', u'cli-optional-in-sequence': None, u'hidden': u'full', u'alt-name': u'no-encrypt', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='empty', is_config=True)
self.__ah_key = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'0..80']}), is_leaf=True, yang_name="ah-key", rest_name="key", parent=self, choice=(u'ch-algorithm', u'ca-ah-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Key used for ah', u'alt-name': u'key', u'suppress-echo': u'true'}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='ipsec-authentication-hexkey-string', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'interface', u'ethernet', u'ipv6', u'interface-ospfv3-conf', u'authentication', u'ipsec-auth-key-config']
def _rest_path(self):
if hasattr(self, "_parent"):
if self._rest_name:
return self._parent._rest_path()+[self._rest_name]
else:
return self._parent._rest_path()
else:
return [u'interface', u'Ethernet', u'ipv6', u'ospf', u'authentication']
def _get_spi(self):
"""
Getter method for spi, mapped from YANG variable /interface/ethernet/ipv6/interface_ospfv3_conf/authentication/ipsec_auth_key_config/spi (spi-value-type)
YANG Description: Security Parameter IndexThe SPI value in combination with the destination IP address and security protocol uniquely identifies the Security Association for a datagram. The near-end and far-end values for the spi must be the same.
"""
return self.__spi
def _set_spi(self, v, load=False):
"""
Setter method for spi, mapped from YANG variable /interface/ethernet/ipv6/interface_ospfv3_conf/authentication/ipsec_auth_key_config/spi (spi-value-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_spi is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_spi() directly.
YANG Description: Security Parameter IndexThe SPI value in combination with the destination IP address and security protocol uniquely identifies the Security Association for a datagram. The near-end and far-end values for the spi must be the same.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'512..4294967295']}), is_leaf=True, yang_name="spi", rest_name="spi", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Security Parameter Index', u'cli-full-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='spi-value-type', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """spi must be of a type compatible with spi-value-type""",
'defined-type': "brocade-ospfv3:spi-value-type",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'512..4294967295']}), is_leaf=True, yang_name="spi", rest_name="spi", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Security Parameter Index', u'cli-full-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='spi-value-type', is_config=True)""",
})
self.__spi = t
if hasattr(self, '_set'):
self._set()
def _unset_spi(self):
self.__spi = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'512..4294967295']}), is_leaf=True, yang_name="spi", rest_name="spi", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Security Parameter Index', u'cli-full-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='spi-value-type', is_config=True)
def _get_ah(self):
"""
Getter method for ah, mapped from YANG variable /interface/ethernet/ipv6/interface_ospfv3_conf/authentication/ipsec_auth_key_config/ah (algorithm-type-ah)
YANG Description: Specify the authentication algorithm to use.
"""
return self.__ah
def _set_ah(self, v, load=False):
"""
Setter method for ah, mapped from YANG variable /interface/ethernet/ipv6/interface_ospfv3_conf/authentication/ipsec_auth_key_config/ah (algorithm-type-ah)
If this variable is read-only (config: false) in the
source YANG file, then _set_ah is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ah() directly.
YANG Description: Specify the authentication algorithm to use.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'hmac-sha1': {'value': 2}, u'hmac-md5': {'value': 1}},), is_leaf=True, yang_name="ah", rest_name="ah", parent=self, choice=(u'ch-algorithm', u'ca-ah-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Specify the authentication algorithm to use', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='algorithm-type-ah', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """ah must be of a type compatible with algorithm-type-ah""",
'defined-type': "brocade-ospfv3:algorithm-type-ah",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'hmac-sha1': {'value': 2}, u'hmac-md5': {'value': 1}},), is_leaf=True, yang_name="ah", rest_name="ah", parent=self, choice=(u'ch-algorithm', u'ca-ah-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Specify the authentication algorithm to use', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='algorithm-type-ah', is_config=True)""",
})
self.__ah = t
if hasattr(self, '_set'):
self._set()
def _unset_ah(self):
self.__ah = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'hmac-sha1': {'value': 2}, u'hmac-md5': {'value': 1}},), is_leaf=True, yang_name="ah", rest_name="ah", parent=self, choice=(u'ch-algorithm', u'ca-ah-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Specify the authentication algorithm to use', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='algorithm-type-ah', is_config=True)
def _get_ah_no_encrypt(self):
"""
Getter method for ah_no_encrypt, mapped from YANG variable /interface/ethernet/ipv6/interface_ospfv3_conf/authentication/ipsec_auth_key_config/ah_no_encrypt (empty)
YANG Description: Do not encrypt the key
"""
return self.__ah_no_encrypt
def _set_ah_no_encrypt(self, v, load=False):
"""
Setter method for ah_no_encrypt, mapped from YANG variable /interface/ethernet/ipv6/interface_ospfv3_conf/authentication/ipsec_auth_key_config/ah_no_encrypt (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_ah_no_encrypt is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ah_no_encrypt() directly.
YANG Description: Do not encrypt the key
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="ah-no-encrypt", rest_name="no-encrypt", parent=self, choice=(u'ch-algorithm', u'ca-ah-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Do not encrypt the key ', u'cli-optional-in-sequence': None, u'hidden': u'full', u'alt-name': u'no-encrypt', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """ah_no_encrypt must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="ah-no-encrypt", rest_name="no-encrypt", parent=self, choice=(u'ch-algorithm', u'ca-ah-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Do not encrypt the key ', u'cli-optional-in-sequence': None, u'hidden': u'full', u'alt-name': u'no-encrypt', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='empty', is_config=True)""",
})
self.__ah_no_encrypt = t
if hasattr(self, '_set'):
self._set()
def _unset_ah_no_encrypt(self):
self.__ah_no_encrypt = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="ah-no-encrypt", rest_name="no-encrypt", parent=self, choice=(u'ch-algorithm', u'ca-ah-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Do not encrypt the key ', u'cli-optional-in-sequence': None, u'hidden': u'full', u'alt-name': u'no-encrypt', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='empty', is_config=True)
def _get_ah_key(self):
"""
Getter method for ah_key, mapped from YANG variable /interface/ethernet/ipv6/interface_ospfv3_conf/authentication/ipsec_auth_key_config/ah_key (ipsec-authentication-hexkey-string)
YANG Description: Key used for ah
"""
return self.__ah_key
def _set_ah_key(self, v, load=False):
"""
Setter method for ah_key, mapped from YANG variable /interface/ethernet/ipv6/interface_ospfv3_conf/authentication/ipsec_auth_key_config/ah_key (ipsec-authentication-hexkey-string)
If this variable is read-only (config: false) in the
source YANG file, then _set_ah_key is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ah_key() directly.
YANG Description: Key used for ah
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'0..80']}), is_leaf=True, yang_name="ah-key", rest_name="key", parent=self, choice=(u'ch-algorithm', u'ca-ah-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Key used for ah', u'alt-name': u'key', u'suppress-echo': u'true'}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='ipsec-authentication-hexkey-string', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """ah_key must be of a type compatible with ipsec-authentication-hexkey-string""",
'defined-type': "brocade-ospfv3:ipsec-authentication-hexkey-string",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'0..80']}), is_leaf=True, yang_name="ah-key", rest_name="key", parent=self, choice=(u'ch-algorithm', u'ca-ah-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Key used for ah', u'alt-name': u'key', u'suppress-echo': u'true'}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='ipsec-authentication-hexkey-string', is_config=True)""",
})
self.__ah_key = t
if hasattr(self, '_set'):
self._set()
def _unset_ah_key(self):
self.__ah_key = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'0..80']}), is_leaf=True, yang_name="ah-key", rest_name="key", parent=self, choice=(u'ch-algorithm', u'ca-ah-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Key used for ah', u'alt-name': u'key', u'suppress-echo': u'true'}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='ipsec-authentication-hexkey-string', is_config=True)
def _get_esp(self):
"""
Getter method for esp, mapped from YANG variable /interface/ethernet/ipv6/interface_ospfv3_conf/authentication/ipsec_auth_key_config/esp (algorithm-type-esp)
YANG Description: Specify Encapsulating Security Payload (ESP) as the protocol to provide packet-level security.
"""
return self.__esp
def _set_esp(self, v, load=False):
"""
Setter method for esp, mapped from YANG variable /interface/ethernet/ipv6/interface_ospfv3_conf/authentication/ipsec_auth_key_config/esp (algorithm-type-esp)
If this variable is read-only (config: false) in the
source YANG file, then _set_esp is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_esp() directly.
YANG Description: Specify Encapsulating Security Payload (ESP) as the protocol to provide packet-level security.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'NULL': {'value': 1}},), is_leaf=True, yang_name="esp", rest_name="esp", parent=self, choice=(u'ch-algorithm', u'ca-esp-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Specify Encapsulating Security Payload (ESP)', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='algorithm-type-esp', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """esp must be of a type compatible with algorithm-type-esp""",
'defined-type': "brocade-ospfv3:algorithm-type-esp",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'NULL': {'value': 1}},), is_leaf=True, yang_name="esp", rest_name="esp", parent=self, choice=(u'ch-algorithm', u'ca-esp-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Specify Encapsulating Security Payload (ESP)', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='algorithm-type-esp', is_config=True)""",
})
self.__esp = t
if hasattr(self, '_set'):
self._set()
def _unset_esp(self):
self.__esp = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'NULL': {'value': 1}},), is_leaf=True, yang_name="esp", rest_name="esp", parent=self, choice=(u'ch-algorithm', u'ca-esp-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Specify Encapsulating Security Payload (ESP)', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='algorithm-type-esp', is_config=True)
def _get_esp_no_encrypt(self):
"""
Getter method for esp_no_encrypt, mapped from YANG variable /interface/ethernet/ipv6/interface_ospfv3_conf/authentication/ipsec_auth_key_config/esp_no_encrypt (empty)
YANG Description: Do not encrypt the key
"""
return self.__esp_no_encrypt
def _set_esp_no_encrypt(self, v, load=False):
"""
Setter method for esp_no_encrypt, mapped from YANG variable /interface/ethernet/ipv6/interface_ospfv3_conf/authentication/ipsec_auth_key_config/esp_no_encrypt (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_esp_no_encrypt is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_esp_no_encrypt() directly.
YANG Description: Do not encrypt the key
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="esp-no-encrypt", rest_name="esp-no-encrypt", parent=self, choice=(u'ch-algorithm', u'ca-esp-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Do not encrypt the key ', u'cli-optional-in-sequence': None, u'hidden': u'full', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """esp_no_encrypt must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="esp-no-encrypt", rest_name="esp-no-encrypt", parent=self, choice=(u'ch-algorithm', u'ca-esp-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Do not encrypt the key ', u'cli-optional-in-sequence': None, u'hidden': u'full', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='empty', is_config=True)""",
})
self.__esp_no_encrypt = t
if hasattr(self, '_set'):
self._set()
def _unset_esp_no_encrypt(self):
self.__esp_no_encrypt = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="esp-no-encrypt", rest_name="esp-no-encrypt", parent=self, choice=(u'ch-algorithm', u'ca-esp-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Do not encrypt the key ', u'cli-optional-in-sequence': None, u'hidden': u'full', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='empty', is_config=True)
def _get_esp_key(self):
"""
Getter method for esp_key, mapped from YANG variable /interface/ethernet/ipv6/interface_ospfv3_conf/authentication/ipsec_auth_key_config/esp_key (ipsec-authentication-hexkey-string)
YANG Description: Hexadecimal key string for ESP
"""
return self.__esp_key
def _set_esp_key(self, v, load=False):
"""
Setter method for esp_key, mapped from YANG variable /interface/ethernet/ipv6/interface_ospfv3_conf/authentication/ipsec_auth_key_config/esp_key (ipsec-authentication-hexkey-string)
If this variable is read-only (config: false) in the
source YANG file, then _set_esp_key is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_esp_key() directly.
YANG Description: Hexadecimal key string for ESP
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'0..80']}), is_leaf=True, yang_name="esp-key", rest_name="esp-key", parent=self, choice=(u'ch-algorithm', u'ca-esp-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Hexadecimal key string for ESP', u'hidden': u'full', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='ipsec-authentication-hexkey-string', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """esp_key must be of a type compatible with ipsec-authentication-hexkey-string""",
'defined-type': "brocade-ospfv3:ipsec-authentication-hexkey-string",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'0..80']}), is_leaf=True, yang_name="esp-key", rest_name="esp-key", parent=self, choice=(u'ch-algorithm', u'ca-esp-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Hexadecimal key string for ESP', u'hidden': u'full', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='ipsec-authentication-hexkey-string', is_config=True)""",
})
self.__esp_key = t
if hasattr(self, '_set'):
self._set()
def _unset_esp_key(self):
self.__esp_key = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'0..80']}), is_leaf=True, yang_name="esp-key", rest_name="esp-key", parent=self, choice=(u'ch-algorithm', u'ca-esp-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Hexadecimal key string for ESP', u'hidden': u'full', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='ipsec-authentication-hexkey-string', is_config=True)
def _get_esp_auth(self):
"""
Getter method for esp_auth, mapped from YANG variable /interface/ethernet/ipv6/interface_ospfv3_conf/authentication/ipsec_auth_key_config/esp_auth (algorithm-type-ah)
"""
return self.__esp_auth
def _set_esp_auth(self, v, load=False):
"""
Setter method for esp_auth, mapped from YANG variable /interface/ethernet/ipv6/interface_ospfv3_conf/authentication/ipsec_auth_key_config/esp_auth (algorithm-type-ah)
If this variable is read-only (config: false) in the
source YANG file, then _set_esp_auth is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_esp_auth() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'hmac-sha1': {'value': 2}, u'hmac-md5': {'value': 1}},), is_leaf=True, yang_name="esp-auth", rest_name="esp-auth", parent=self, choice=(u'ch-algorithm', u'ca-esp-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Use Authentication Algorithm', u'cli-drop-node-name': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='algorithm-type-ah', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """esp_auth must be of a type compatible with algorithm-type-ah""",
'defined-type': "brocade-ospfv3:algorithm-type-ah",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'hmac-sha1': {'value': 2}, u'hmac-md5': {'value': 1}},), is_leaf=True, yang_name="esp-auth", rest_name="esp-auth", parent=self, choice=(u'ch-algorithm', u'ca-esp-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Use Authentication Algorithm', u'cli-drop-node-name': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='algorithm-type-ah', is_config=True)""",
})
self.__esp_auth = t
if hasattr(self, '_set'):
self._set()
def _unset_esp_auth(self):
self.__esp_auth = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'hmac-sha1': {'value': 2}, u'hmac-md5': {'value': 1}},), is_leaf=True, yang_name="esp-auth", rest_name="esp-auth", parent=self, choice=(u'ch-algorithm', u'ca-esp-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Use Authentication Algorithm', u'cli-drop-node-name': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='algorithm-type-ah', is_config=True)
def _get_esp_auth_no_encrypt(self):
"""
Getter method for esp_auth_no_encrypt, mapped from YANG variable /interface/ethernet/ipv6/interface_ospfv3_conf/authentication/ipsec_auth_key_config/esp_auth_no_encrypt (empty)
YANG Description: Do not encrypt the key
"""
return self.__esp_auth_no_encrypt
def _set_esp_auth_no_encrypt(self, v, load=False):
"""
Setter method for esp_auth_no_encrypt, mapped from YANG variable /interface/ethernet/ipv6/interface_ospfv3_conf/authentication/ipsec_auth_key_config/esp_auth_no_encrypt (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_esp_auth_no_encrypt is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_esp_auth_no_encrypt() directly.
YANG Description: Do not encrypt the key
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="esp-auth-no-encrypt", rest_name="no-encrypt", parent=self, choice=(u'ch-algorithm', u'ca-esp-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Do not encrypt the key ', u'cli-optional-in-sequence': None, u'hidden': u'full', u'alt-name': u'no-encrypt', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """esp_auth_no_encrypt must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="esp-auth-no-encrypt", rest_name="no-encrypt", parent=self, choice=(u'ch-algorithm', u'ca-esp-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Do not encrypt the key ', u'cli-optional-in-sequence': None, u'hidden': u'full', u'alt-name': u'no-encrypt', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='empty', is_config=True)""",
})
self.__esp_auth_no_encrypt = t
if hasattr(self, '_set'):
self._set()
def _unset_esp_auth_no_encrypt(self):
self.__esp_auth_no_encrypt = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="esp-auth-no-encrypt", rest_name="no-encrypt", parent=self, choice=(u'ch-algorithm', u'ca-esp-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Do not encrypt the key ', u'cli-optional-in-sequence': None, u'hidden': u'full', u'alt-name': u'no-encrypt', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='empty', is_config=True)
def _get_esp_auth_key(self):
"""
Getter method for esp_auth_key, mapped from YANG variable /interface/ethernet/ipv6/interface_ospfv3_conf/authentication/ipsec_auth_key_config/esp_auth_key (ipsec-authentication-hexkey-string)
YANG Description: Hexadecimal key string for authentication algorithm
"""
return self.__esp_auth_key
def _set_esp_auth_key(self, v, load=False):
"""
Setter method for esp_auth_key, mapped from YANG variable /interface/ethernet/ipv6/interface_ospfv3_conf/authentication/ipsec_auth_key_config/esp_auth_key (ipsec-authentication-hexkey-string)
If this variable is read-only (config: false) in the
source YANG file, then _set_esp_auth_key is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_esp_auth_key() directly.
YANG Description: Hexadecimal key string for authentication algorithm
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'0..80']}), is_leaf=True, yang_name="esp-auth-key", rest_name="key", parent=self, choice=(u'ch-algorithm', u'ca-esp-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Hexadecimal key string for authentication algorithm', u'alt-name': u'key', u'suppress-echo': u'true'}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='ipsec-authentication-hexkey-string', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """esp_auth_key must be of a type compatible with ipsec-authentication-hexkey-string""",
'defined-type': "brocade-ospfv3:ipsec-authentication-hexkey-string",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'0..80']}), is_leaf=True, yang_name="esp-auth-key", rest_name="key", parent=self, choice=(u'ch-algorithm', u'ca-esp-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Hexadecimal key string for authentication algorithm', u'alt-name': u'key', u'suppress-echo': u'true'}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='ipsec-authentication-hexkey-string', is_config=True)""",
})
self.__esp_auth_key = t
if hasattr(self, '_set'):
self._set()
def _unset_esp_auth_key(self):
self.__esp_auth_key = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'0..80']}), is_leaf=True, yang_name="esp-auth-key", rest_name="key", parent=self, choice=(u'ch-algorithm', u'ca-esp-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Hexadecimal key string for authentication algorithm', u'alt-name': u'key', u'suppress-echo': u'true'}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='ipsec-authentication-hexkey-string', is_config=True)
spi = __builtin__.property(_get_spi, _set_spi)
ah = __builtin__.property(_get_ah, _set_ah)
ah_no_encrypt = __builtin__.property(_get_ah_no_encrypt, _set_ah_no_encrypt)
ah_key = __builtin__.property(_get_ah_key, _set_ah_key)
esp = __builtin__.property(_get_esp, _set_esp)
esp_no_encrypt = __builtin__.property(_get_esp_no_encrypt, _set_esp_no_encrypt)
esp_key = __builtin__.property(_get_esp_key, _set_esp_key)
esp_auth = __builtin__.property(_get_esp_auth, _set_esp_auth)
esp_auth_no_encrypt = __builtin__.property(_get_esp_auth_no_encrypt, _set_esp_auth_no_encrypt)
esp_auth_key = __builtin__.property(_get_esp_auth_key, _set_esp_auth_key)
__choices__ = {u'ch-algorithm': {u'ca-ah-algorithm': [u'ah', u'ah_no_encrypt', u'ah_key'], u'ca-esp-algorithm': [u'esp', u'esp_no_encrypt', u'esp_key', u'esp_auth', u'esp_auth_no_encrypt', u'esp_auth_key']}}
_pyangbind_elements = {'spi': spi, 'ah': ah, 'ah_no_encrypt': ah_no_encrypt, 'ah_key': ah_key, 'esp': esp, 'esp_no_encrypt': esp_no_encrypt, 'esp_key': esp_key, 'esp_auth': esp_auth, 'esp_auth_no_encrypt': esp_auth_no_encrypt, 'esp_auth_key': esp_auth_key, }
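The class body ends by publishing the private getters and setters as plain properties, so callers interact with validated attributes instead of the _set_* machinery. A minimal usage sketch follows, under the assumption that the enclosing generated class (instantiated here as `ipsec_auth_key_config`, a hypothetical name for the container defined above this excerpt) is importable:

# Hypothetical usage sketch: `ipsec_auth_key_config` stands in for the
# generated container class whose definition precedes this excerpt.
cfg = ipsec_auth_key_config()

# Attribute assignment is routed through _set_esp_auth, which wraps the
# value in YANGDynClass and enforces the dict_key restriction.
cfg.esp_auth = "hmac-sha1"        # accepted: member of {hmac-md5, hmac-sha1}

try:
    cfg.esp_auth = "hmac-sha256"  # rejected: not in the restriction dict
except ValueError as err:
    # the raised dict carries 'error-string', 'defined-type', 'generated-type'
    print(err)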
| 87.687764 | 732 | 0.722091 | 5,916 | 41,564 | 4.852603 | 0.039723 | 0.029783 | 0.023408 | 0.019507 | 0.922704 | 0.902849 | 0.889438 | 0.882193 | 0.8692 | 0.860945 | 0 | 0.008732 | 0.134828 | 41,564 | 473 | 733 | 87.87315 | 0.7896 | 0.178376 | 0 | 0.475836 | 0 | 0.037175 | 0.425663 | 0.184362 | 0 | 0 | 0 | 0 | 0 | 1 | 0.122677 | false | 0 | 0.02974 | 0 | 0.271375 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
81f52e3bfa631c804888a9d9932b81592422b6e8 | 25 | py | Python | models/__init__.py | zyn1030z/owl_tree_view | 787dbb926815dc529c40695d52d67eea24c0b705 | ["MIT"] | 3 | 2021-09-22T07:39:29.000Z | 2021-12-16T09:10:45.000Z | models/__init__.py | zyn1030z/owl_tree_view | 787dbb926815dc529c40695d52d67eea24c0b705 | ["MIT"] | 1 | 2021-11-11T11:50:49.000Z | 2021-11-12T08:35:47.000Z | models/__init__.py | zyn1030z/owl_tree_view | 787dbb926815dc529c40695d52d67eea24c0b705 | ["MIT"] | 2 | 2021-11-04T03:10:45.000Z | 2021-11-30T21:58:25.000Z |
from . import ir_ui_view
| 12.5 | 24 | 0.8 | 5 | 25 | 3.6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.16 | 25 | 1 | 25 | 25 | 0.857143 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
c307232281c79b7e2032d265d22beb34ae0aa1b8 | 151 | py | Python | rta/cv/__init__.py | MatteoLacki/rta | 93944d6fc934126e0bb4d076c8b4213cadbe49a1 | ["BSD-2-Clause"] | 1 | 2018-05-31T14:31:18.000Z | 2018-05-31T14:31:18.000Z | rta/cv/__init__.py | MatteoLacki/rta | 93944d6fc934126e0bb4d076c8b4213cadbe49a1 | ["BSD-2-Clause"] | null | null | null | rta/cv/__init__.py | MatteoLacki/rta | 93944d6fc934126e0bb4d076c8b4213cadbe49a1 | ["BSD-2-Clause"] | null | null | null |
"""The preprocessing submodule.
Tools for preprocessing data.
"""
from .grouped_k_folds import grouped_K_folds
from .filters import filter_K_foldable
| 21.571429 | 44 | 0.821192 | 21 | 151 | 5.619048 | 0.666667 | 0.135593 | 0.220339 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.112583 | 151 | 7 | 45 | 21.571429 | 0.880597 | 0.390728 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
c345c317fad3c228bee55ebb0b1c58eceefe0ce4 | 7,775 | py | Python | demo_v1.py | kmautonsys/bayesSIR | 638b21eb03831ede421816d17368ed6d28aed492 | ["BSD-3-Clause"] | 2 | 2020-04-06T18:09:16.000Z | 2020-04-30T19:34:34.000Z | demo_v1.py | kmautonsys/bayesSIR | 638b21eb03831ede421816d17368ed6d28aed492 | ["BSD-3-Clause"] | null | null | null | demo_v1.py | kmautonsys/bayesSIR | 638b21eb03831ede421816d17368ed6d28aed492 | ["BSD-3-Clause"] | null | null | null |
#!/usr/bin/env python3
import pystan
import numpy as np
import json
import requests
import pandas as pd
import matplotlib.pyplot as plt
config = []
r=requests.get("https://covidtracking.com/api/states/daily")
state_data = pd.DataFrame(r.json())
def load_config(file,data):
"""
This loads the config file for a Stan run. The config file is assumed to be in JSON format.
The data file is assumed to have positive, deaths, and hospitalized features. Positive refers to positive tests, a proxy for confirmed cases.
This needs some error checking, particularly for the case where a required parameter is missing.
"""
config = dict()
with open(file) as f:
config = json.load(f)
#Adjust the data types
for key in config.keys():
if isinstance(config[key],list):
config[key] = np.array(config[key])
with open('./state_populations.json') as f:
state_pops = json.load(f)
for key in state_pops.keys():
state_pops[key] = int(state_pops[key])
# look up time series for the state
state = config['state']
state_data = data[data.state==state]
state_pop = state_pops[state]
# These parameters can be loaded from the data
config['dat_cases'] = np.flip(state_data.positive.values.astype('int'))
config['dat_caseNA'] = np.isnan(state_data.positive.values).astype('int')
config['dat_deaths'] = np.flip(np.nan_to_num(state_data.death.values).astype('int'))
config['dat_deathNA'] = np.isnan(state_data.death.values).astype('int') # NA deaths should be forward-filled
config['dat_hospitalized'] = np.flip(state_data.hospitalized.values.astype('int'))
config['dat_hospNA'] = np.isnan(state_data.hospitalized.values).astype('int') # NA hospitalizations should be forward-filled
dat_ts = np.flip(np.array(state_data.date.values))
# fill in the derived parameters and adjust data types
config['dat_ts'] = dat_ts - np.max(dat_ts) -1
config['TotalPop'] = float(state_pop)
# config["nonCOVIDMU"] = 0.1*config['TotalPop']
config['M'] = len(config["tranMU"])
config['Q'] = len(config["dat_ts"])
#config['recMU']=np.log(config['recMU'])
config['S0MU'] = np.log(0.5*state_pop)
# Forecast parameters
ts = 1+np.arange(config['N'])
config["ts"] = ts
config.pop('state',None)
#print(config)
return config
def describe_fit(fit):
"""
Simple analysis of the fitted model and display of the population evaluation
"""
summary = fit.summary()
summary = pd.DataFrame(summary['summary'], index = summary['summary_rownames'], columns = summary['summary_colnames'])
print("Estimated transmited rate: {:.2f} ({:.2f})".format(summary['mean']['tran[1]'], summary['sd']['tran[1]']))
print("Recovery time: {:.2f} ({:.2f}) days".format(summary['mean'].rec, summary['sd'].rec))
print("Proportion population affected: {:.2f}% ({:.2f})".format(summary['mean'].ConfirmProportion_* 100, summary['sd'].ConfirmProportion_ * 100))
# Evolution Cases
for (group, color, id) in [('Infected', 'orange', 'I'), ('Dead', 'red', 'Deaths')]:
training, prediction = summary.loc[[i for i in summary.index if 'Fit{}['.format(id) in i]], summary.loc[[i for i in summary.index if ('{}['.format(id) in i) and ('Fit' not in i)]]
plt.plot(np.arange(-len(training),0) + 1, training['mean'], alpha = 0.5, color = color)
plt.fill_between(np.arange(-len(training),0) + 1, training['2.5%'], training['97.5%'], alpha = 0.25, color = color)
plt.plot(np.arange(len(prediction) + 1), [training['mean'].iloc[-1]] + prediction['mean'].tolist(), label = group, ls = ':', color = color)
plt.fill_between(np.arange(len(prediction) + 1), [training['2.5%'].iloc[-1]] + prediction['2.5%'].tolist(), [training['97.5%'].iloc[-1]] + prediction['97.5%'].tolist(), alpha = 0.5, color = color)
plt.xlabel('Days')
plt.ylabel('Number')
plt.legend()
plt.show()
dat = load_config('ny_conf.json',state_data)
#print(dat)
sm = pystan.StanModel(file="bayesSIRv1.1.stan")
#fit = sm.sampling(data=dat, iter=1000, chains=4)
fit = sm.optimizing(data=dat)
#describe_fit(fit)
# #!/usr/bin/env python3
# import pystan
# import numpy as np
# import json
# import requests
# import pandas as pd
# import matplotlib.pyplot as plt
# r=requests.get("https://covidtracking.com/api/states/daily")
# state_data = pd.DataFrame(r.json())
# config = []
# # outbreak priors
# def load_config(file,data):
# config = dict()
# with open(file) as f:
# config = json.load(f)
# #Adjust the data types
# for key in config.keys():
# if isinstance(config[key],list):
# config[key] = np.array(config[key])
# # look up time series for the state
# state = config['state']
# state_data = data[data.state==state]
# config['dat_cases'] = np.flip(state_data.positive.values.astype('int'))
# config['dat_tests'] = np.flip(state_data.totalTestResults.values.astype('int'))
# config['dat_deaths'] = np.flip(np.nan_to_num(state_data.death.values).astype('int'))
# config['dat_deathNA'] = np.isnan(state_data.death.values).astype('int')
# #config['dat_hospitalized'] = state_data.hospitalized.values.astype('int')
# dat_ts = np.flip(np.array(state_data.date.values))
# config['dat_ts'] = dat_ts - np.max(dat_ts) -1
# config["TotalPop"] = 19440469 # this is new york specific
# config["nonCOVIDMU"] = 0.1*config['TotalPop']
# #fill in the derived parameters
# config["I0MU"] = max(config["dat_cases"]).astype('int') # mean current infected population
# config["R0MU"] = max(config["dat_cases"]).astype('int') # mean current recovered population
# config["M"] = len(config["tranMU"])
# config["Q"] = len(config["dat_ts"])
# config["DeathMU"] = 0
# config["DeathSD"] = 2
# # Forecast parameters
# ts = 1+np.arange(config['N'])
# config["ts"] = ts
# config.pop('state',None)
# return config
# def describe_fit(fit):
# """
# Simple analysis of the fitted model and display of the population evaluation
# """
# summary = fit.summary()
# summary = pd.DataFrame(summary['summary'], index = summary['summary_rownames'], columns = summary['summary_colnames'])
# print("Estimated transmited rate: {:.2f} ({:.2f})".format(summary['mean']['tran[1]'], summary['sd']['tran[1]']))
# print("Recovery time: {:.2f} ({:.2f}) days".format(summary['mean'].rec, summary['sd'].rec))
# print("Proportion population affected: {:.2f}% ({:.2f})".format(summary['mean'].ConfirmProportion_* 100, summary['sd'].ConfirmProportion_ * 100))
# # Evolution Cases
# for (group, color, id) in [('Infected', 'orange', 'I'), ('Dead', 'red', 'Deaths')]:
# training, prediction = summary.loc[[i for i in summary.index if 'Fit{}['.format(id) in i]], summary.loc[[i for i in summary.index if ('{}['.format(id) in i) and ('Fit' not in i)]]
# plt.plot(np.arange(-1*len(training),0) + 1, training['mean'], alpha = 0.5, color = color)
# plt.fill_between(np.arange(-len(training),0) + 1, training['2.5%'], training['97.5%'], alpha = 0.25, color = color)
# plt.plot(np.arange(len(prediction) + 1), [training['mean'][-1]] + prediction['mean'].tolist(), label = group, ls = ':', color = color)
# plt.fill_between(np.arange(len(prediction) + 1), [training['2.5%'][-1]] + prediction['2.5%'].tolist(), [training['97.5%'][-1]] + prediction['97.5%'].tolist(), alpha = 0.5, color = color)
# plt.xlabel('Days')
# plt.ylabel('Number')
# plt.legend()
# plt.show()
# dat = load_config('ny_conf.json',state_data)
# sm = pystan.StanModel(file="bayesSIRv1.1.stan")
# fit = sm.sampling(data=dat, iter=4000, chains=4)
# describe_fit(fit)
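The NA-handling comments in load_config say the deaths and hospitalization series "should be forward-filled", but the active code only records NA masks. A sketch, not part of the original demo, of one way that forward fill could be applied before the integer cast:

import numpy as np
import pandas as pd

def forward_fill_counts(values):
    """Forward-fill NaNs in a cumulative-count series, then cast to int.

    Leading NaNs (days before the first report) are treated as zero.
    """
    filled = pd.Series(values).ffill().fillna(0)
    return filled.to_numpy().astype('int')

# example: a deaths column of [nan, 1.0, nan, 3.0] becomes [0, 1, 1, 3]
print(forward_fill_counts([np.nan, 1.0, np.nan, 3.0]))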
| 44.683908 | 199 | 0.642444 | 1,089 | 7,775 | 4.51607 | 0.195592 | 0.03477 | 0.03355 | 0.03416 | 0.818625 | 0.797682 | 0.762708 | 0.762708 | 0.742375 | 0.742375 | 0 | 0.020174 | 0.17119 | 7,775 | 173 | 200 | 44.942197 | 0.743017 | 0.556013 | 0 | 0 | 0 | 0 | 0.1538 | 0.007238 | 0 | 0 | 0 | 0 | 0 | 1 | 0.034483 | false | 0 | 0.103448 | 0 | 0.155172 | 0.051724 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
c34d99a0bdf22d14f9460e931b87649357237ea2 | 22 | py | Python | access_token_service/__init__.py | HumanBrainProject/jupyterhub-access-token-service | 6da05ce0ecaab57c23a30806e773bdacbcbe8a30 | ["Apache-2.0"] | null | null | null | access_token_service/__init__.py | HumanBrainProject/jupyterhub-access-token-service | 6da05ce0ecaab57c23a30806e773bdacbcbe8a30 | ["Apache-2.0"] | null | null | null | access_token_service/__init__.py | HumanBrainProject/jupyterhub-access-token-service | 6da05ce0ecaab57c23a30806e773bdacbcbe8a30 | ["Apache-2.0"] | null | null | null |
from .ats import main
| 11 | 21 | 0.772727 | 4 | 22 | 4.25 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.181818 | 22 | 1 | 22 | 22 | 0.944444 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
c35b93bd85c3f2c046c395cca65e59b91375e333 | 21,613 | py | Python | docker-images/taigav2/taiga-back/tests/integration/resources_permissions/test_epics_custom_attributes_resource.py | mattcongy/itshop | 6be025a9eaa7fe7f495b5777d1f0e5a3184121c9 | ["MIT"] | 1 | 2017-05-29T19:01:06.000Z | 2017-05-29T19:01:06.000Z | docker-images/taigav2/taiga-back/tests/integration/resources_permissions/test_epics_custom_attributes_resource.py | mattcongy/itshop | 6be025a9eaa7fe7f495b5777d1f0e5a3184121c9 | ["MIT"] | null | null | null | docker-images/taigav2/taiga-back/tests/integration/resources_permissions/test_epics_custom_attributes_resource.py | mattcongy/itshop | 6be025a9eaa7fe7f495b5777d1f0e5a3184121c9 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (C) 2014-2016 Andrey Antukh <niwi@niwi.nz>
# Copyright (C) 2014-2016 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014-2016 David Barragán <bameda@dbarragan.com>
# Copyright (C) 2014-2016 Alejandro Alonso <alejandro.alonso@kaleidos.net>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.core.urlresolvers import reverse
from taiga.base.utils import json
from taiga.projects import choices as project_choices
from taiga.projects.custom_attributes import serializers
from taiga.permissions.choices import (MEMBERS_PERMISSIONS,
ANON_PERMISSIONS)
from tests import factories as f
from tests.utils import helper_test_http_method
import pytest
pytestmark = pytest.mark.django_db
@pytest.fixture
def data():
m = type("Models", (object,), {})
m.registered_user = f.UserFactory.create()
m.project_member_with_perms = f.UserFactory.create()
m.project_member_without_perms = f.UserFactory.create()
m.project_owner = f.UserFactory.create()
m.other_user = f.UserFactory.create()
m.superuser = f.UserFactory.create(is_superuser=True)
m.public_project = f.ProjectFactory(is_private=False,
anon_permissions=list(map(lambda x: x[0], ANON_PERMISSIONS)),
public_permissions=list(map(lambda x: x[0], ANON_PERMISSIONS)),
owner=m.project_owner)
m.private_project1 = f.ProjectFactory(is_private=True,
anon_permissions=list(map(lambda x: x[0], ANON_PERMISSIONS)),
public_permissions=list(map(lambda x: x[0], ANON_PERMISSIONS)),
owner=m.project_owner)
m.private_project2 = f.ProjectFactory(is_private=True,
anon_permissions=[],
public_permissions=[],
owner=m.project_owner)
m.blocked_project = f.ProjectFactory(is_private=True,
anon_permissions=[],
public_permissions=[],
owner=m.project_owner,
blocked_code=project_choices.BLOCKED_BY_STAFF)
m.public_membership = f.MembershipFactory(project=m.public_project,
user=m.project_member_with_perms,
email=m.project_member_with_perms.email,
role__project=m.public_project,
role__permissions=list(map(lambda x: x[0], MEMBERS_PERMISSIONS)))
m.private_membership1 = f.MembershipFactory(project=m.private_project1,
user=m.project_member_with_perms,
email=m.project_member_with_perms.email,
role__project=m.private_project1,
role__permissions=list(map(lambda x: x[0], MEMBERS_PERMISSIONS)))
f.MembershipFactory(project=m.private_project1,
user=m.project_member_without_perms,
email=m.project_member_without_perms.email,
role__project=m.private_project1,
role__permissions=[])
m.private_membership2 = f.MembershipFactory(project=m.private_project2,
user=m.project_member_with_perms,
email=m.project_member_with_perms.email,
role__project=m.private_project2,
role__permissions=list(map(lambda x: x[0], MEMBERS_PERMISSIONS)))
f.MembershipFactory(project=m.private_project2,
user=m.project_member_without_perms,
email=m.project_member_without_perms.email,
role__project=m.private_project2,
role__permissions=[])
m.blocked_membership = f.MembershipFactory(project=m.blocked_project,
user=m.project_member_with_perms,
email=m.project_member_with_perms.email,
role__project=m.blocked_project,
role__permissions=list(map(lambda x: x[0], MEMBERS_PERMISSIONS)))
f.MembershipFactory(project=m.blocked_project,
user=m.project_member_without_perms,
email=m.project_member_without_perms.email,
role__project=m.blocked_project,
role__permissions=[])
f.MembershipFactory(project=m.public_project,
user=m.project_owner,
is_admin=True)
f.MembershipFactory(project=m.private_project1,
user=m.project_owner,
is_admin=True)
f.MembershipFactory(project=m.private_project2,
user=m.project_owner,
is_admin=True)
f.MembershipFactory(project=m.blocked_project,
user=m.project_owner,
is_admin=True)
m.public_epic_ca = f.EpicCustomAttributeFactory(project=m.public_project)
m.private_epic_ca1 = f.EpicCustomAttributeFactory(project=m.private_project1)
m.private_epic_ca2 = f.EpicCustomAttributeFactory(project=m.private_project2)
m.blocked_epic_ca = f.EpicCustomAttributeFactory(project=m.blocked_project)
m.public_epic = f.EpicFactory(project=m.public_project,
status__project=m.public_project)
m.private_epic1 = f.EpicFactory(project=m.private_project1,
status__project=m.private_project1)
m.private_epic2 = f.EpicFactory(project=m.private_project2,
status__project=m.private_project2)
m.blocked_epic = f.EpicFactory(project=m.blocked_project,
status__project=m.blocked_project)
m.public_epic_cav = m.public_epic.custom_attributes_values
m.private_epic_cav1 = m.private_epic1.custom_attributes_values
m.private_epic_cav2 = m.private_epic2.custom_attributes_values
m.blocked_epic_cav = m.blocked_epic.custom_attributes_values
return m
#########################################################
# Epic Custom Attribute
#########################################################
def test_epic_custom_attribute_retrieve(client, data):
public_url = reverse('epic-custom-attributes-detail', kwargs={"pk": data.public_epic_ca.pk})
private1_url = reverse('epic-custom-attributes-detail', kwargs={"pk": data.private_epic_ca1.pk})
private2_url = reverse('epic-custom-attributes-detail', kwargs={"pk": data.private_epic_ca2.pk})
blocked_url = reverse('epic-custom-attributes-detail', kwargs={"pk": data.blocked_epic_ca.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'get', public_url, None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', private1_url, None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', private2_url, None, users)
assert results == [401, 403, 403, 200, 200]
results = helper_test_http_method(client, 'get', blocked_url, None, users)
assert results == [401, 403, 403, 200, 200]
def test_epic_custom_attribute_create(client, data):
public_url = reverse('epic-custom-attributes-list')
private1_url = reverse('epic-custom-attributes-list')
private2_url = reverse('epic-custom-attributes-list')
blocked_url = reverse('epic-custom-attributes-list')
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
epic_ca_data = {"name": "test-new", "project": data.public_project.id}
epic_ca_data = json.dumps(epic_ca_data)
results = helper_test_http_method(client, 'post', public_url, epic_ca_data, users)
assert results == [401, 403, 403, 403, 201]
epic_ca_data = {"name": "test-new", "project": data.private_project1.id}
epic_ca_data = json.dumps(epic_ca_data)
results = helper_test_http_method(client, 'post', private1_url, epic_ca_data, users)
assert results == [401, 403, 403, 403, 201]
epic_ca_data = {"name": "test-new", "project": data.private_project2.id}
epic_ca_data = json.dumps(epic_ca_data)
results = helper_test_http_method(client, 'post', private2_url, epic_ca_data, users)
assert results == [401, 403, 403, 403, 201]
epic_ca_data = {"name": "test-new", "project": data.blocked_project.id}
epic_ca_data = json.dumps(epic_ca_data)
results = helper_test_http_method(client, 'post', blocked_url, epic_ca_data, users)
assert results == [401, 403, 403, 403, 451]
def test_epic_custom_attribute_update(client, data):
public_url = reverse('epic-custom-attributes-detail', kwargs={"pk": data.public_epic_ca.pk})
private1_url = reverse('epic-custom-attributes-detail', kwargs={"pk": data.private_epic_ca1.pk})
private2_url = reverse('epic-custom-attributes-detail', kwargs={"pk": data.private_epic_ca2.pk})
blocked_url = reverse('epic-custom-attributes-detail', kwargs={"pk": data.blocked_epic_ca.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
epic_ca_data = serializers.EpicCustomAttributeSerializer(data.public_epic_ca).data
epic_ca_data["name"] = "test"
epic_ca_data = json.dumps(epic_ca_data)
results = helper_test_http_method(client, 'put', public_url, epic_ca_data, users)
assert results == [401, 403, 403, 403, 200]
epic_ca_data = serializers.EpicCustomAttributeSerializer(data.private_epic_ca1).data
epic_ca_data["name"] = "test"
epic_ca_data = json.dumps(epic_ca_data)
results = helper_test_http_method(client, 'put', private1_url, epic_ca_data, users)
assert results == [401, 403, 403, 403, 200]
epic_ca_data = serializers.EpicCustomAttributeSerializer(data.private_epic_ca2).data
epic_ca_data["name"] = "test"
epic_ca_data = json.dumps(epic_ca_data)
results = helper_test_http_method(client, 'put', private2_url, epic_ca_data, users)
assert results == [401, 403, 403, 403, 200]
epic_ca_data = serializers.EpicCustomAttributeSerializer(data.blocked_epic_ca).data
epic_ca_data["name"] = "test"
epic_ca_data = json.dumps(epic_ca_data)
results = helper_test_http_method(client, 'put', blocked_url, epic_ca_data, users)
assert results == [401, 403, 403, 403, 451]
def test_epic_custom_attribute_delete(client, data):
public_url = reverse('epic-custom-attributes-detail', kwargs={"pk": data.public_epic_ca.pk})
private1_url = reverse('epic-custom-attributes-detail', kwargs={"pk": data.private_epic_ca1.pk})
private2_url = reverse('epic-custom-attributes-detail', kwargs={"pk": data.private_epic_ca2.pk})
blocked_url = reverse('epic-custom-attributes-detail', kwargs={"pk": data.blocked_epic_ca.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'delete', public_url, None, users)
assert results == [401, 403, 403, 403, 204]
results = helper_test_http_method(client, 'delete', private1_url, None, users)
assert results == [401, 403, 403, 403, 204]
results = helper_test_http_method(client, 'delete', private2_url, None, users)
assert results == [401, 403, 403, 403, 204]
results = helper_test_http_method(client, 'delete', blocked_url, None, users)
assert results == [401, 403, 403, 403, 451]
def test_epic_custom_attribute_list(client, data):
url = reverse('epic-custom-attributes-list')
response = client.json.get(url)
assert len(response.data) == 2
assert response.status_code == 200
client.login(data.registered_user)
response = client.json.get(url)
assert len(response.data) == 2
assert response.status_code == 200
client.login(data.project_member_without_perms)
response = client.json.get(url)
assert len(response.data) == 2
assert response.status_code == 200
client.login(data.project_member_with_perms)
response = client.json.get(url)
assert len(response.data) == 4
assert response.status_code == 200
client.login(data.project_owner)
response = client.json.get(url)
assert len(response.data) == 4
assert response.status_code == 200
def test_epic_custom_attribute_patch(client, data):
public_url = reverse('epic-custom-attributes-detail', kwargs={"pk": data.public_epic_ca.pk})
private1_url = reverse('epic-custom-attributes-detail', kwargs={"pk": data.private_epic_ca1.pk})
private2_url = reverse('epic-custom-attributes-detail', kwargs={"pk": data.private_epic_ca2.pk})
blocked_url = reverse('epic-custom-attributes-detail', kwargs={"pk": data.blocked_epic_ca.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'patch', public_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 200]
results = helper_test_http_method(client, 'patch', private1_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 200]
results = helper_test_http_method(client, 'patch', private2_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 200]
results = helper_test_http_method(client, 'patch', blocked_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 451]
def test_epic_custom_attribute_action_bulk_update_order(client, data):
url = reverse('epic-custom-attributes-bulk-update-order')
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
post_data = json.dumps({
"bulk_epic_custom_attributes": [(1,2)],
"project": data.public_project.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 204]
post_data = json.dumps({
"bulk_epic_custom_attributes": [(1,2)],
"project": data.private_project1.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 204]
post_data = json.dumps({
"bulk_epic_custom_attributes": [(1,2)],
"project": data.private_project2.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 204]
post_data = json.dumps({
"bulk_epic_custom_attributes": [(1,2)],
"project": data.blocked_project.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 451]
#########################################################
# Epic Custom Attribute Values
#########################################################
def test_epic_custom_attributes_values_retrieve(client, data):
public_url = reverse('epic-custom-attributes-values-detail', kwargs={"epic_id": data.public_epic.pk})
private_url1 = reverse('epic-custom-attributes-values-detail', kwargs={"epic_id": data.private_epic1.pk})
private_url2 = reverse('epic-custom-attributes-values-detail', kwargs={"epic_id": data.private_epic2.pk})
blocked_url = reverse('epic-custom-attributes-values-detail', kwargs={"epic_id": data.blocked_epic.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'get', public_url, None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', private_url1, None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', private_url2, None, users)
assert results == [401, 403, 403, 200, 200]
results = helper_test_http_method(client, 'get', blocked_url, None, users)
assert results == [401, 403, 403, 200, 200]
def test_epic_custom_attributes_values_update(client, data):
public_url = reverse('epic-custom-attributes-values-detail', kwargs={"epic_id": data.public_epic.pk})
private_url1 = reverse('epic-custom-attributes-values-detail', kwargs={"epic_id": data.private_epic1.pk})
private_url2 = reverse('epic-custom-attributes-values-detail', kwargs={"epic_id": data.private_epic2.pk})
blocked_url = reverse('epic-custom-attributes-values-detail', kwargs={"epic_id": data.blocked_epic.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
epic_data = serializers.EpicCustomAttributesValuesSerializer(data.public_epic_cav).data
epic_data["attributes_values"] = {str(data.public_epic_ca.pk): "test"}
epic_data = json.dumps(epic_data)
results = helper_test_http_method(client, 'put', public_url, epic_data, users)
assert results == [401, 403, 403, 200, 200]
epic_data = serializers.EpicCustomAttributesValuesSerializer(data.private_epic_cav1).data
epic_data["attributes_values"] = {str(data.private_epic_ca1.pk): "test"}
epic_data = json.dumps(epic_data)
results = helper_test_http_method(client, 'put', private_url1, epic_data, users)
assert results == [401, 403, 403, 200, 200]
epic_data = serializers.EpicCustomAttributesValuesSerializer(data.private_epic_cav2).data
epic_data["attributes_values"] = {str(data.private_epic_ca2.pk): "test"}
epic_data = json.dumps(epic_data)
results = helper_test_http_method(client, 'put', private_url2, epic_data, users)
assert results == [401, 403, 403, 200, 200]
epic_data = serializers.EpicCustomAttributesValuesSerializer(data.blocked_epic_cav).data
epic_data["attributes_values"] = {str(data.blocked_epic_ca.pk): "test"}
epic_data = json.dumps(epic_data)
results = helper_test_http_method(client, 'put', blocked_url, epic_data, users)
assert results == [401, 403, 403, 451, 451]
def test_epic_custom_attributes_values_patch(client, data):
public_url = reverse('epic-custom-attributes-values-detail', kwargs={"epic_id": data.public_epic.pk})
private_url1 = reverse('epic-custom-attributes-values-detail', kwargs={"epic_id": data.private_epic1.pk})
private_url2 = reverse('epic-custom-attributes-values-detail', kwargs={"epic_id": data.private_epic2.pk})
blocked_url = reverse('epic-custom-attributes-values-detail', kwargs={"epic_id": data.blocked_epic.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
patch_data = json.dumps({"attributes_values": {str(data.public_epic_ca.pk): "test"},
"version": data.public_epic.version})
results = helper_test_http_method(client, 'patch', public_url, patch_data, users)
assert results == [401, 403, 403, 200, 200]
patch_data = json.dumps({"attributes_values": {str(data.private_epic_ca1.pk): "test"},
"version": data.private_epic1.version})
results = helper_test_http_method(client, 'patch', private_url1, patch_data, users)
assert results == [401, 403, 403, 200, 200]
patch_data = json.dumps({"attributes_values": {str(data.private_epic_ca2.pk): "test"},
"version": data.private_epic2.version})
results = helper_test_http_method(client, 'patch', private_url2, patch_data, users)
assert results == [401, 403, 403, 200, 200]
patch_data = json.dumps({"attributes_values": {str(data.blocked_epic_ca.pk): "test"},
"version": data.blocked_epic.version})
results = helper_test_http_method(client, 'patch', blocked_url, patch_data, users)
assert results == [401, 403, 403, 451, 451]
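Every assertion above goes through tests.utils.helper_test_http_method, which is imported but defined outside this excerpt. What follows is a hedged reconstruction inferred only from the call sites here; the real taiga-back helper may differ in detail:

def helper_test_http_method(client, method, url, data, users):
    """Issue the same request as each user in turn; return the status codes.

    Sketch only: assumes the test client exposes login()/logout() and a
    .json wrapper with get/post/put/patch/delete methods, as the tests
    above suggest.
    """
    results = []
    for user in users:
        client.logout()
        if user is not None:
            client.login(user)
        func = getattr(client.json, method)
        response = func(url, data) if data is not None else func(url)
        results.append(response.status_code)
    return results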
| 47.087146 | 113 | 0.655208 | 2,652 | 21,613 | 5.063348 | 0.080317 | 0.023235 | 0.064045 | 0.055109 | 0.879133 | 0.849568 | 0.79714 | 0.768097 | 0.7605 | 0.693327 | 0 | 0.041746 | 0.225281 | 21,613 | 458 | 114 | 47.189956 | 0.760213 | 0.043585 | 0 | 0.629944 | 0 | 0 | 0.090143 | 0.057729 | 0 | 0 | 0 | 0 | 0.129944 | 1 | 0.031073 | false | 0 | 0.022599 | 0 | 0.056497 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
c3627cbdd37682cff7a59616869916d942dc1c7f | 156 | py | Python | komorebi/__init__.py | alvations/komorebi | ba9617cb0c89f8f038ac2a80bcdbc4c450d0a3dd | ["MIT"] | null | null | null | komorebi/__init__.py | alvations/komorebi | ba9617cb0c89f8f038ac2a80bcdbc4c450d0a3dd | ["MIT"] | 2 | 2018-06-12T08:45:16.000Z | 2018-06-13T08:39:53.000Z | komorebi/__init__.py | alvations/komorebi | ba9617cb0c89f8f038ac2a80bcdbc4c450d0a3dd | ["MIT"] | 2 | 2018-06-13T06:06:06.000Z | 2020-02-08T04:16:05.000Z |
# -*- coding: utf-8 -*-
from komorebi.text import TextData
from komorebi.parallel import ParallelData
from komorebi.torch_datasets import ParallelDataset
| 22.285714 | 51 | 0.801282 | 19 | 156 | 6.526316 | 0.684211 | 0.290323 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007299 | 0.121795 | 156 | 6 | 52 | 26 | 0.89781 | 0.134615 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
6f06c9ced4fc97724343d3596b8685500f9fbd9f | 124 | py | Python | python/ql/test/query-tests/Imports/PyCheckerTests/pkg_ok/__init__.py | vadi2/codeql | a806a4f08696d241ab295a286999251b56a6860c | ["MIT"] | 4,036 | 2020-04-29T00:09:57.000Z | 2022-03-31T14:16:38.000Z | python/ql/test/query-tests/Imports/PyCheckerTests/pkg_ok/__init__.py | vadi2/codeql | a806a4f08696d241ab295a286999251b56a6860c | ["MIT"] | 2,970 | 2020-04-28T17:24:18.000Z | 2022-03-31T22:40:46.000Z | python/ql/test/query-tests/Imports/PyCheckerTests/pkg_ok/__init__.py | ScriptBox99/github-codeql | 2ecf0d3264db8fb4904b2056964da469372a235c | ["MIT"] | 794 | 2020-04-29T00:28:25.000Z | 2022-03-30T08:21:46.000Z |
import pkg_ok.foo1 as foo1
from pkg_ok import foo2
from pkg_ok.foo3 import Foo3
from . import foo4
from .foo5 import Foo5
| 15.5 | 28 | 0.790323 | 24 | 124 | 3.958333 | 0.416667 | 0.157895 | 0.189474 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.078431 | 0.177419 | 124 | 7 | 29 | 17.714286 | 0.852941 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
6f1ff2f5d5b0320134dbaed8d99f478e6712f04c | 13,342 | py | Python | tests/steps/test_field.py | areleu/frictionless-py | d62ccf6efbe23020956d35ce396b3cef9af18d42 | ["MIT"] | null | null | null | tests/steps/test_field.py | areleu/frictionless-py | d62ccf6efbe23020956d35ce396b3cef9af18d42 | ["MIT"] | null | null | null | tests/steps/test_field.py | areleu/frictionless-py | d62ccf6efbe23020956d35ce396b3cef9af18d42 | ["MIT"] | null | null | null |
from frictionless import Resource, transform, steps
# Add
def test_step_field_add():
source = Resource(path="data/transform.csv")
target = transform(
source,
steps=[
steps.field_add(name="note", type="string", value="eu"),
],
)
assert target.schema == {
"fields": [
{"name": "id", "type": "integer"},
{"name": "name", "type": "string"},
{"name": "population", "type": "integer"},
{"name": "note", "type": "string"},
]
}
assert target.read_rows() == [
{"id": 1, "name": "germany", "population": 83, "note": "eu"},
{"id": 2, "name": "france", "population": 66, "note": "eu"},
{"id": 3, "name": "spain", "population": 47, "note": "eu"},
]
def test_step_field_add_with_position():
source = Resource(path="data/transform.csv")
target = transform(
source,
steps=[
steps.field_add(name="note", position=1, value="eu"),
],
)
assert target.schema == {
"fields": [
{"name": "note"},
{"name": "id", "type": "integer"},
{"name": "name", "type": "string"},
{"name": "population", "type": "integer"},
]
}
assert target.read_rows() == [
{"note": "eu", "id": 1, "name": "germany", "population": 83},
{"note": "eu", "id": 2, "name": "france", "population": 66},
{"note": "eu", "id": 3, "name": "spain", "population": 47},
]
def test_step_field_add_with_formula():
source = Resource(path="data/transform.csv")
target = transform(
source,
steps=[
steps.table_normalize(),
steps.field_add(name="calc", formula="id * 100 + population"),
],
)
assert target.schema == {
"fields": [
{"name": "id", "type": "integer"},
{"name": "name", "type": "string"},
{"name": "population", "type": "integer"},
{"name": "calc"},
]
}
assert target.read_rows() == [
{"id": 1, "name": "germany", "population": 83, "calc": 183},
{"id": 2, "name": "france", "population": 66, "calc": 266},
{"id": 3, "name": "spain", "population": 47, "calc": 347},
]
def test_step_field_add_with_function():
source = Resource(path="data/transform.csv")
target = transform(
source,
steps=[
steps.table_normalize(),
steps.field_add(
name="calc", function=lambda row: row["id"] * 100 + row["population"]
),
],
)
assert target.schema == {
"fields": [
{"name": "id", "type": "integer"},
{"name": "name", "type": "string"},
{"name": "population", "type": "integer"},
{"name": "calc"},
]
}
assert target.read_rows() == [
{"id": 1, "name": "germany", "population": 83, "calc": 183},
{"id": 2, "name": "france", "population": 66, "calc": 266},
{"id": 3, "name": "spain", "population": 47, "calc": 347},
]
def test_step_field_add_with_incremental():
source = Resource(path="data/transform.csv")
target = transform(
source,
steps=[
steps.field_add(name="number", incremental=True),
],
)
assert target.schema == {
"fields": [
{"name": "number"},
{"name": "id", "type": "integer"},
{"name": "name", "type": "string"},
{"name": "population", "type": "integer"},
]
}
assert target.read_rows() == [
{"number": 1, "id": 1, "name": "germany", "population": 83},
{"number": 2, "id": 2, "name": "france", "population": 66},
{"number": 3, "id": 3, "name": "spain", "population": 47},
]
# Filter
def test_step_field_filter():
source = Resource(path="data/transform.csv")
target = transform(
source,
steps=[
steps.field_filter(names=["id", "name"]),
],
)
assert target.schema == {
"fields": [
{"name": "id", "type": "integer"},
{"name": "name", "type": "string"},
]
}
assert target.read_rows() == [
{"id": 1, "name": "germany"},
{"id": 2, "name": "france"},
{"id": 3, "name": "spain"},
]
# Move
def test_step_field_move():
source = Resource(path="data/transform.csv")
target = transform(
source,
steps=[
steps.field_move(name="id", position=3),
],
)
assert target.schema == {
"fields": [
{"name": "name", "type": "string"},
{"name": "population", "type": "integer"},
{"name": "id", "type": "integer"},
]
}
assert target.read_rows() == [
{"name": "germany", "population": 83, "id": 1},
{"name": "france", "population": 66, "id": 2},
{"name": "spain", "population": 47, "id": 3},
]
# Remove
def test_step_field_remove():
source = Resource(path="data/transform.csv")
target = transform(
source,
steps=[
steps.field_remove(names=["id"]),
],
)
assert target.schema == {
"fields": [
{"name": "name", "type": "string"},
{"name": "population", "type": "integer"},
]
}
assert target.read_rows() == [
{"name": "germany", "population": 83},
{"name": "france", "population": 66},
{"name": "spain", "population": 47},
]
# Split
def test_step_field_split():
source = Resource(path="data/transform.csv")
target = transform(
source,
steps=[
steps.field_split(name="name", to_names=["name1", "name2"], pattern="a"),
],
)
assert target.schema == {
"fields": [
{"name": "id", "type": "integer"},
{"name": "population", "type": "integer"},
{"name": "name1", "type": "string"},
{"name": "name2", "type": "string"},
]
}
assert target.read_rows() == [
{"id": 1, "population": 83, "name1": "germ", "name2": "ny"},
{"id": 2, "population": 66, "name1": "fr", "name2": "nce"},
{"id": 3, "population": 47, "name1": "sp", "name2": "in"},
]
def test_step_field_split_with_preserve():
source = Resource(path="data/transform.csv")
target = transform(
source,
steps=[
steps.field_split(
name="name", to_names=["name1", "name2"], pattern="a", preserve=True
),
],
)
assert target.schema == {
"fields": [
{"name": "id", "type": "integer"},
{"name": "name", "type": "string"},
{"name": "population", "type": "integer"},
{"name": "name1", "type": "string"},
{"name": "name2", "type": "string"},
]
}
assert target.read_rows() == [
{"id": 1, "name": "germany", "population": 83, "name1": "germ", "name2": "ny"},
{"id": 2, "name": "france", "population": 66, "name1": "fr", "name2": "nce"},
{"id": 3, "name": "spain", "population": 47, "name1": "sp", "name2": "in"},
]
def test_step_field_split_with_capturing_groups():
source = Resource(path="data/transform.csv")
target = transform(
source,
steps=[
steps.field_split(
name="name", to_names=["name1", "name2"], pattern=r"(.{2})(.*)"
),
],
)
assert target.schema == {
"fields": [
{"name": "id", "type": "integer"},
{"name": "population", "type": "integer"},
{"name": "name1", "type": "string"},
{"name": "name2", "type": "string"},
]
}
assert target.read_rows() == [
{"id": 1, "population": 83, "name1": "ge", "name2": "rmany"},
{"id": 2, "population": 66, "name1": "fr", "name2": "ance"},
{"id": 3, "population": 47, "name1": "sp", "name2": "ain"},
]
# Unpack
def test_step_field_unpack():
source = Resource(path="data/transform.csv")
target = transform(
source,
steps=[
steps.field_update(name="id", type="array", value=[1, 1]),
steps.field_unpack(name="id", to_names=["id2", "id3"]),
],
)
assert target.schema == {
"fields": [
{"name": "name", "type": "string"},
{"name": "population", "type": "integer"},
{"name": "id2"},
{"name": "id3"},
]
}
assert target.read_rows() == [
{"name": "germany", "population": 83, "id2": 1, "id3": 1},
{"name": "france", "population": 66, "id2": 1, "id3": 1},
{"name": "spain", "population": 47, "id2": 1, "id3": 1},
]
def test_step_field_unpack_with_preserve():
source = Resource(path="data/transform.csv")
target = transform(
source,
steps=[
steps.field_update(name="id", type="array", value=[1, 1]),
steps.field_unpack(name="id", to_names=["id2", "id3"], preserve=True),
],
)
assert target.schema == {
"fields": [
{"name": "id", "type": "array"},
{"name": "name", "type": "string"},
{"name": "population", "type": "integer"},
{"name": "id2"},
{"name": "id3"},
]
}
assert target.read_rows() == [
{"id": [1, 1], "name": "germany", "population": 83, "id2": 1, "id3": 1},
{"id": [1, 1], "name": "france", "population": 66, "id2": 1, "id3": 1},
{"id": [1, 1], "name": "spain", "population": 47, "id2": 1, "id3": 1},
]
def test_step_field_unpack_source_is_object():
source = Resource(path="data/transform.csv")
target = transform(
source,
steps=[
steps.field_update(name="id", type="object", value={"note": "eu"}),
steps.field_unpack(name="id", to_names=["note"]),
],
)
assert target.schema == {
"fields": [
{"name": "name", "type": "string"},
{"name": "population", "type": "integer"},
{"name": "note"},
]
}
assert target.read_rows() == [
{"name": "germany", "population": 83, "note": "eu"},
{"name": "france", "population": 66, "note": "eu"},
{"name": "spain", "population": 47, "note": "eu"},
]
# Update
def test_step_field_update():
source = Resource(path="data/transform.csv")
target = transform(
source,
steps=[
steps.field_update(name="id", type="string", function=str),
],
)
assert target.schema == {
"fields": [
{"name": "id", "type": "string"},
{"name": "name", "type": "string"},
{"name": "population", "type": "integer"},
]
}
assert target.read_rows() == [
{"id": "1", "name": "germany", "population": 83},
{"id": "2", "name": "france", "population": 66},
{"id": "3", "name": "spain", "population": 47},
]
def test_step_field_update_with_exact_value():
source = Resource(path="data/transform.csv")
target = transform(
source,
steps=[
steps.field_update(name="id", type="string", value="x"),
],
)
assert target.schema == {
"fields": [
{"name": "id", "type": "string"},
{"name": "name", "type": "string"},
{"name": "population", "type": "integer"},
]
}
assert target.read_rows() == [
{"id": "x", "name": "germany", "population": 83},
{"id": "x", "name": "france", "population": 66},
{"id": "x", "name": "spain", "population": 47},
]
def test_step_field_update_new_name():
source = Resource(path="data/transform.csv")
target = transform(
source,
steps=[
steps.field_update(name="id", new_name="new-name"),
],
)
assert target.schema == {
"fields": [
{"name": "new-name", "type": "integer"},
{"name": "name", "type": "string"},
{"name": "population", "type": "integer"},
]
}
assert target.read_rows() == [
{"new-name": 1, "name": "germany", "population": 83},
{"new-name": 2, "name": "france", "population": 66},
{"new-name": 3, "name": "spain", "population": 47},
]
# Issues
def test_transform_rename_move_field_issue_953():
target = transform(
data=[
{"id": 1, "name": "germany", "population": 83},
{"id": 2, "name": "france", "population": 66},
{"id": 3, "name": "spain", "population": 47},
],
steps=[
steps.table_normalize(),
steps.field_update(name="name", new_name="country"),
steps.field_move(name="country", position=3),
],
)
assert target.schema == {
"fields": [
{"name": "id", "type": "integer"},
{"name": "population", "type": "integer"},
{"name": "country", "type": "string"},
]
}
assert target.read_rows() == [
{"id": 1, "population": 83, "country": "germany"},
{"id": 2, "population": 66, "country": "france"},
{"id": 3, "population": 47, "country": "spain"},
]
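All of the tests above read the fixture data/transform.csv, which is not included in this excerpt. A self-contained sketch of the same transform API with inline data, so the pipeline shape is runnable without the fixture file:

from frictionless import transform, steps

target = transform(
    data=[
        {"id": 1, "name": "germany", "population": 83},
        {"id": 2, "name": "france", "population": 66},
    ],
    steps=[
        steps.table_normalize(),
        steps.field_remove(names=["id"]),
        steps.field_add(name="note", type="string", value="eu"),
    ],
)
# rows keep name/population and gain the constant "note" column
print(target.read_rows())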
| 29.78125 | 87 | 0.473018 | 1,305 | 13,342 | 4.735632 | 0.072797 | 0.069903 | 0.053398 | 0.069903 | 0.876375 | 0.842718 | 0.790291 | 0.767152 | 0.734304 | 0.70712 | 0 | 0.027685 | 0.306926 | 13,342 | 447 | 88 | 29.847875 | 0.64064 | 0.003673 | 0 | 0.563307 | 0 | 0 | 0.245559 | 0 | 0 | 0 | 0 | 0 | 0.093023 | 1 | 0.046512 | false | 0 | 0.002584 | 0 | 0.049096 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
6f2dabc96e4906639c416de8dbcc00a6d02c964e | 95 | py | Python | m3u8dl/__init__.py | OliverVermeulen/m3u8-dl | 16d9b43ea2eeb9ee113acad2540d74c4ae92f496 | ["MIT"] | 46 | 2020-06-20T19:33:01.000Z | 2022-03-22T03:24:41.000Z | m3u8dl/__init__.py | OliverVermeulen/m3u8-dl | 16d9b43ea2eeb9ee113acad2540d74c4ae92f496 | ["MIT"] | 38 | 2020-07-18T19:34:00.000Z | 2021-11-23T12:05:38.000Z | m3u8dl/__init__.py | OliverVermeulen/m3u8-dl | 16d9b43ea2eeb9ee113acad2540d74c4ae92f496 | ["MIT"] | 22 | 2020-06-13T21:52:33.000Z | 2022-03-06T11:44:42.000Z |
"""Module starts the m3u8 program."""
from .core.m3u8dl import main
def start():
main()
| 11.875 | 37 | 0.652632 | 13 | 95 | 4.769231 | 0.923077 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.052632 | 0.2 | 95 | 7 | 38 | 13.571429 | 0.763158 | 0.326316 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
6f457d869ce1dcc72e026b5610d872719de1d604 | 5,280 | py | Python | dfirtrack_api/tests/dfirtrack_main/company/test_company_api_views.py | cclauss/dfirtrack | 2a307c5fe82e927b3c229a20a02bc0c7a5d66d9a | ["Apache-2.0"] | null | null | null | dfirtrack_api/tests/dfirtrack_main/company/test_company_api_views.py | cclauss/dfirtrack | 2a307c5fe82e927b3c229a20a02bc0c7a5d66d9a | ["Apache-2.0"] | null | null | null | dfirtrack_api/tests/dfirtrack_main/company/test_company_api_views.py | cclauss/dfirtrack | 2a307c5fe82e927b3c229a20a02bc0c7a5d66d9a | ["Apache-2.0"] | null | null | null |
import urllib.parse
from django.contrib.auth.models import User
from django.test import TestCase
from dfirtrack_main.models import Company, Division
class CompanyAPIViewTestCase(TestCase):
""" company API view tests """
@classmethod
def setUpTestData(cls):
# create object
Division.objects.create(division_name='division_api_1')
# create object
Division.objects.create(division_name='division_api_2')
# create object
Company.objects.create(
company_name='company_api_1'
)
# create user
User.objects.create_user(username='testuser_company_api', password='tvjnIPBlhP9P3ixDHVE7')
def test_company_list_api_unauthorized(self):
""" unauthorized access is forbidden"""
# get response
response = self.client.get('/api/company/')
# compare
self.assertEqual(response.status_code, 401)
def test_company_list_api_method_get(self):
""" GET is allowed """
# login testuser
self.client.login(username='testuser_company_api', password='tvjnIPBlhP9P3ixDHVE7')
# get response
response = self.client.get('/api/company/')
# compare
self.assertEqual(response.status_code, 200)
def test_company_list_api_method_post(self):
""" POST is allowed """
# get object
division_id = str(Division.objects.get(division_name='division_api_2').division_id)
# login testuser
self.client.login(username='testuser_company_api', password='tvjnIPBlhP9P3ixDHVE7')
# create POST string
poststring = {
"company_name": "company_api_2",
"division": division_id,
}
# get response
response = self.client.post('/api/company/', data=poststring)
# compare
self.assertEqual(response.status_code, 201)
def test_company_list_api_redirect(self):
""" test redirect with appending slash """
# login testuser
self.client.login(username='testuser_company_api', password='tvjnIPBlhP9P3ixDHVE7')
# create url
destination = urllib.parse.quote('/api/company/', safe='/')
# get response
response = self.client.get('/api/company', follow=True)
# compare
self.assertRedirects(response, destination, status_code=301, target_status_code=200)
def test_company_detail_api_unauthorized(self):
""" unauthorized access is forbidden"""
# get object
company_api_1 = Company.objects.get(company_name='company_api_1')
# get response
response = self.client.get('/api/company/' + str(company_api_1.company_id) + '/')
# compare
self.assertEqual(response.status_code, 401)
def test_company_detail_api_method_get(self):
""" GET is allowed """
# get object
company_api_1 = Company.objects.get(company_name='company_api_1')
# login testuser
self.client.login(username='testuser_company_api', password='tvjnIPBlhP9P3ixDHVE7')
# get response
response = self.client.get('/api/company/' + str(company_api_1.company_id) + '/')
# compare
self.assertEqual(response.status_code, 200)
def test_company_detail_api_method_delete(self):
""" DELETE is forbidden """
# get object
company_api_1 = Company.objects.get(company_name='company_api_1')
# login testuser
self.client.login(username='testuser_company_api', password='tvjnIPBlhP9P3ixDHVE7')
# get response
response = self.client.delete('/api/company/' + str(company_api_1.company_id) + '/')
# compare
self.assertEqual(response.status_code, 405)
def test_company_detail_api_method_put(self):
""" PUT is allowed """
# get object
division_id = str(Division.objects.get(division_name='division_api_1').division_id)
# get object
company_api_1 = Company.objects.get(company_name='company_api_1')
# login testuser
self.client.login(username='testuser_company_api', password='tvjnIPBlhP9P3ixDHVE7')
# create url
destination = urllib.parse.quote('/api/company/' + str(company_api_1.company_id) + '/', safe='/')
# create PUT string
putstring = {
"company_name": "new_company_api_1",
"division": division_id,
}
# get response
response = self.client.put(destination, data=putstring, content_type='application/json')
# compare
self.assertEqual(response.status_code, 200)
def test_company_detail_api_redirect(self):
""" test redirect with appending slash """
# get object
company_api_1 = Company.objects.get(company_name='company_api_1')
# login testuser
self.client.login(username='testuser_company_api', password='tvjnIPBlhP9P3ixDHVE7')
# create url
destination = urllib.parse.quote('/api/company/' + str(company_api_1.company_id) + '/', safe='/')
# get response
response = self.client.get('/api/company/' + str(company_api_1.company_id), follow=True)
# compare
self.assertRedirects(response, destination, status_code=301, target_status_code=200)
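# Editor's note: a minimal sketch (hypothetical refactor, not part of the
# source file) showing how the client.login() call repeated in the tests above
# could be hoisted into setUp(); the credentials mirror the tests, while the
# class name and structure below are assumptions.
from django.test import TestCase

class CompanyAPISketchTestCase(TestCase):
    def setUp(self):
        # runs before every test method, so each test starts authenticated
        self.client.login(username='testuser_company_api', password='tvjnIPBlhP9P3ixDHVE7')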
| 37.446809
| 105
| 0.656061
| 592
| 5,280
| 5.608108
| 0.138514
| 0.084337
| 0.059639
| 0.059639
| 0.84488
| 0.826205
| 0.780422
| 0.763554
| 0.67741
| 0.629819
| 0
| 0.019802
| 0.234848
| 5,280
| 140
| 106
| 37.714286
| 0.80198
| 0.14072
| 0
| 0.415385
| 0
| 0
| 0.155546
| 0
| 0
| 0
| 0
| 0
| 0.138462
| 1
| 0.153846
| false
| 0.123077
| 0.061538
| 0
| 0.230769
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0 | 6 | 6f4c0bd65073bc1b8bfc793ed654204db8b97d92 | 35 | py | Python | test/login.py | zedjax/test | 500a7b66f7ef3d5d2dace90342cd10135bc3b7e5 | ["MIT"] | null | null | null | test/login.py | zedjax/test | 500a7b66f7ef3d5d2dace90342cd10135bc3b7e5 | ["MIT"] | null | null | null | test/login.py | zedjax/test | 500a7b66f7ef3d5d2dace90342cd10135bc3b7e5 | ["MIT"] | null | null | null |
a = 1
b = 2
c = a + b
d = '668'
| 4.375
| 9
| 0.314286
| 9
| 35
| 1.222222
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.277778
| 0.485714
| 35
| 7
| 10
| 5
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0.085714
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0 | 6 | 489ea1d7c8fa654522ed9a8b784c2879f6325817 | 8,484 | py | Python | intro-ansible/venv3/lib/python3.8/site-packages/ansible_collections/sensu/sensu_go/tests/unit/modules/test_datastore.py | Stienvdh/statrick | 7b092fc42171e226718a70a285a4b323f2f395ad | ["MIT"] | null | null | null | intro-ansible/venv3/lib/python3.8/site-packages/ansible_collections/sensu/sensu_go/tests/unit/modules/test_datastore.py | Stienvdh/statrick | 7b092fc42171e226718a70a285a4b323f2f395ad | ["MIT"] | null | null | null | intro-ansible/venv3/lib/python3.8/site-packages/ansible_collections/sensu/sensu_go/tests/unit/modules/test_datastore.py | Stienvdh/statrick | 7b092fc42171e226718a70a285a4b323f2f395ad | ["MIT"] | null | null | null |
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
from ansible_collections.sensu.sensu_go.plugins.module_utils import (
errors, http,
)
from ansible_collections.sensu.sensu_go.plugins.modules import datastore
from .common.utils import (
AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args,
)
class TestSync(ModuleTestCase):
def test_absent_no_current_object(self, mocker):
client = mocker.Mock()
client.get.return_value = http.Response(404, "")
changed, object = datastore.sync(
"absent", client, "/list", "/resource", {}, False,
)
assert changed is False
assert object is None
def test_absent_no_current_object_check(self, mocker):
client = mocker.Mock()
client.get.return_value = http.Response(404, "")
changed, object = datastore.sync(
"absent", client, "/list", "/resource", {}, True,
)
assert changed is False
assert object is None
def test_absent_current_object_present(self, mocker):
client = mocker.Mock()
client.get.return_value = http.Response(200, '{}')
client.delete.return_value = http.Response(204, "")
changed, object = datastore.sync(
"absent", client, "/list", "/resource", {}, False,
)
assert changed is True
assert object is None
client.delete.assert_called_with("/resource")
def test_absent_current_object_present_check(self, mocker):
client = mocker.Mock()
client.get.return_value = http.Response(200, '{}')
client.delete.return_value = http.Response(204, "")
changed, object = datastore.sync(
"absent", client, "/list", "/resource", {}, True,
)
assert changed is True
assert object is None
client.delete.assert_not_called()
def test_present_current_object_differ(self, mocker):
client = mocker.Mock()
client.get.side_effect = (
http.Response(200, '{"spec": {"current": "data"}}'),
http.Response(200, '{"spec": {"new": "data"}}'),
)
client.put.return_value = http.Response(201, "")
changed, object = datastore.sync(
"present", client, "/list", "/resource", {"spec": {"my": "data"}},
False,
)
assert changed is True
assert {"new": "data"} == object
client.put.assert_called_once_with(
"/resource", {"spec": {"my": "data"}},
)
def test_present_current_object_differ_check(self, mocker):
client = mocker.Mock()
client.get.return_value = (
http.Response(200, '{"spec": {"current": "data"}}')
)
changed, object = datastore.sync(
"present", client, "/list", "/resource", {"spec": {"my": "data"}},
True,
)
assert changed is True
assert {"my": "data"} == object
client.put.assert_not_called()
def test_present_current_object_does_not_differ(self, mocker):
client = mocker.Mock()
client.get.return_value = (
http.Response(200, '{"spec": {"my": "data"}}')
)
changed, object = datastore.sync(
"present", client, "/list", "/resource", {"spec": {"my": "data"}},
False,
)
assert changed is False
assert {"my": "data"} == object
client.put.assert_not_called()
def test_present_current_object_does_not_differ_check(self, mocker):
client = mocker.Mock()
client.get.return_value = (
http.Response(200, '{"spec": {"my": "data"}}')
)
changed, object = datastore.sync(
"present", client, "/list", "/resource", {"spec": {"my": "data"}},
True,
)
assert changed is False
assert {"my": "data"} == object
client.put.assert_not_called()
def test_present_no_current_object_empty_backend(self, mocker):
client = mocker.Mock()
client.get.side_effect = (
http.Response(404, ""),
http.Response(200, "[]"),
http.Response(200, '{"spec": {"new": "data"}}'),
)
client.put.return_value = http.Response(201, "")
changed, object = datastore.sync(
"present", client, "/list", "/resource", {"spec": {"my": "data"}},
False,
)
assert changed is True
assert {"new": "data"} == object
client.put.assert_called_once_with(
"/resource", {"spec": {"my": "data"}},
)
def test_present_no_current_object_empty_backend_check(self, mocker):
client = mocker.Mock()
client.get.side_effect = (
http.Response(404, ""),
http.Response(200, "[]"),
)
changed, object = datastore.sync(
"present", client, "/list", "/resource", {"spec": {"my": "data"}},
True,
)
assert changed is True
assert {"my": "data"} == object
client.put.assert_not_called()
@pytest.mark.parametrize("check", [False, True])
def test_present_no_current_object_non_empty_backend(self, mocker, check):
client = mocker.Mock()
client.get.side_effect = (
http.Response(404, ""),
http.Response(200, "[{}]"),
)
with pytest.raises(errors.Error, match="already active"):
datastore.sync(
"present", client, "/list", "/resource",
{"spec": {"my": "data"}}, check,
)
client.put.assert_not_called()
class TestDatastore(ModuleTestCase):
def test_minimal_datastore_parameters_present(self, mocker):
sync_mock = mocker.patch.object(datastore, "sync")
sync_mock.return_value = True, {}
set_module_args(
name="test_datastore",
dsn="my-dsn",
)
with pytest.raises(AnsibleExitJson):
datastore.main()
state, _client, list_path, resource_path, payload, check_mode = (
sync_mock.call_args[0]
)
assert state == "present"
assert resource_path == "/api/enterprise/store/v1/provider/test_datastore"
assert list_path == "/api/enterprise/store/v1/provider"
assert payload == dict(
type="PostgresConfig",
api_version="store/v1",
metadata=dict(name="test_datastore"),
spec=dict(dsn="my-dsn"),
)
assert check_mode is False
def test_minimal_datastore_parameters_absent(self, mocker):
sync_mock = mocker.patch.object(datastore, "sync")
sync_mock.return_value = True, {}
set_module_args(
name="test_datastore",
state="absent",
)
with pytest.raises(AnsibleExitJson):
datastore.main()
state, _client, list_path, resource_path, _payload, check_mode = (
sync_mock.call_args[0]
)
assert state == "absent"
assert resource_path == "/api/enterprise/store/v1/provider/test_datastore"
assert list_path == "/api/enterprise/store/v1/provider"
assert check_mode is False
def test_all_datastore_parameters(self, mocker):
sync_mock = mocker.patch.object(datastore, "sync")
sync_mock.return_value = True, {}
set_module_args(
name="test_datastore",
dsn="my-dsn",
pool_size=543,
)
with pytest.raises(AnsibleExitJson):
datastore.main()
state, _client, list_path, resource_path, payload, check_mode = (
sync_mock.call_args[0]
)
assert state == "present"
assert resource_path == "/api/enterprise/store/v1/provider/test_datastore"
assert list_path == "/api/enterprise/store/v1/provider"
assert payload == dict(
type="PostgresConfig",
api_version="store/v1",
metadata=dict(name="test_datastore"),
spec=dict(dsn="my-dsn", pool_size=543),
)
assert check_mode is False
def test_failure(self, mocker):
sync_mock = mocker.patch.object(datastore, "sync")
sync_mock.side_effect = errors.Error("Bad error")
set_module_args(
name="test_datastore",
dsn="my-dsn",
)
with pytest.raises(AnsibleFailJson):
datastore.main()
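# Editor's note: a minimal, self-contained sketch of the create/delete logic
# these tests assert, reconstructed only from the assertions above. The names
# below (sync_sketch, Response) are stand-ins, not the real sensu_go module,
# and the "already active" guard on the list path is omitted for brevity.
import json
from collections import namedtuple

Response = namedtuple("Response", "status data")

def sync_sketch(state, client, list_path, resource_path, payload, check_mode):
    remote = client.get(resource_path)
    if state == "absent":
        if remote.status == 404:
            return False, None                # nothing to delete
        if not check_mode:
            client.delete(resource_path)      # check mode only reports the change
        return True, None
    # state == "present": write only when the remote document differs
    current = None if remote.status == 404 else json.loads(remote.data)
    if current == payload:
        return False, payload["spec"]
    if check_mode:
        return True, payload["spec"]          # predicted result, no write
    client.put(resource_path, payload)
    return True, json.loads(client.get(resource_path).data)["spec"]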
| 32.136364
| 82
| 0.576497
| 896
| 8,484
| 5.252232
| 0.122768
| 0.050999
| 0.056524
| 0.051424
| 0.891415
| 0.873778
| 0.834467
| 0.79983
| 0.778368
| 0.768168
| 0
| 0.012769
| 0.28925
| 8,484
| 263
| 83
| 32.258555
| 0.767662
| 0
| 0
| 0.652582
| 0
| 0
| 0.120344
| 0.028642
| 0
| 0
| 0
| 0
| 0.201878
| 1
| 0.070423
| false
| 0
| 0.023474
| 0
| 0.103286
| 0.004695
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0 | 6 | 48a03431d2911d22cbd996d83abbfe3ae03b7724 | 80 | py | Python | nes/bus/devices/apu/sq2_lo.py | Hexadorsimal/pynes | dbb3d40c1240fa27f70fa798bcec09188755eec2 | ["MIT"] | 1 | 2017-05-13T18:57:09.000Z | 2017-05-13T18:57:09.000Z | nes/bus/devices/apu/sq2_lo.py | Hexadorsimal/py6502 | dbb3d40c1240fa27f70fa798bcec09188755eec2 | ["MIT"] | 7 | 2020-10-24T17:16:56.000Z | 2020-11-01T14:10:23.000Z | nes/bus/devices/apu/sq2_lo.py | Hexadorsimal/pynes | dbb3d40c1240fa27f70fa798bcec09188755eec2 | ["MIT"] | null | null | null |
from nes.processors.registers import Register
class Sq2Lo(Register):
    """Square channel 2 low-byte register; all behaviour is inherited from Register."""
| 13.333333
| 45
| 0.775
| 10
| 80
| 6.2
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014925
| 0.1625
| 80
| 5
| 46
| 16
| 0.910448
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0 | 6 | 48a7834ccb8e3ca4aba0878cc3df94e60db56b34 | 2,436 | py | Python | tensorplane/core/lib/attrs.py | jyhanna/tensorplane | ac2679cec18ccb3f5b1ba353e7e53e669e115c3f | ["BSD-3-Clause"] | 1 | 2020-05-06T18:12:35.000Z | 2020-05-06T18:12:35.000Z | tensorplane/core/lib/attrs.py | jyhanna/tensorplane | ac2679cec18ccb3f5b1ba353e7e53e669e115c3f | ["BSD-3-Clause"] | null | null | null | tensorplane/core/lib/attrs.py | jyhanna/tensorplane | ac2679cec18ccb3f5b1ba353e7e53e669e115c3f | ["BSD-3-Clause"] | null | null | null |
import sys
class NullAssignment(object):
"""
"""
def __init__(self):
raise Exception('Cannot initialize null assignment, use type as flag.')
def _UndefinedAttribute(x):
return AttributeError('Attempted to access undefined dataset attribute {}'.format(x))
class UndefinedAttribute(object):
"""
"""
def __init__(self, attr):
self.value_ = attr
def __getattribute__(self, name):
if name == 'value_':
return object.__getattribute__(self, name)
else:
raise _UndefinedAttribute(self.value_)
def _raise_error(self, *args, **kwargs):
raise _UndefinedAttribute(self.value_)
def __getitem__(self, *args, **kwargs):
raise _UndefinedAttribute(self.value_)
def __setitem__(self, *args, **kwargs):
raise _UndefinedAttribute(self.value_)
def __truediv__(self, o):
raise _UndefinedAttribute(self.value_)
def __floordiv__(self, o):
raise _UndefinedAttribute(self.value_)
def __add__(self, o):
raise _UndefinedAttribute(self.value_)
def __sub__(self, o):
raise _UndefinedAttribute(self.value_)
def __mul__(self, o):
raise _UndefinedAttribute(self.value_)
def __mod__(self, o):
raise _UndefinedAttribute(self.value_)
def __pow__(self, o):
raise _UndefinedAttribute(self.value_)
def __lt__(self, o):
raise _UndefinedAttribute(self.value_)
def __gt__(self, o):
raise _UndefinedAttribute(self.value_)
def __le__(self, o):
raise _UndefinedAttribute(self.value_)
def __ge__(self, o):
raise _UndefinedAttribute(self.value_)
def __ne__(self, o):
raise _UndefinedAttribute(self.value_)
    def __neg__(self):
        # unary operators take no operand argument
        raise _UndefinedAttribute(self.value_)
    def __pos__(self):
        raise _UndefinedAttribute(self.value_)
    def __invert__(self):
        raise _UndefinedAttribute(self.value_)
def __eq__(self, o):
raise _UndefinedAttribute(self.value_)
def __isub__(self, o):
raise _UndefinedAttribute(self.value_)
def __iadd__(self, o):
raise _UndefinedAttribute(self.value_)
def __imul__(self, o):
raise _UndefinedAttribute(self.value_)
def __idiv__(self, o):
raise _UndefinedAttribute(self.value_)
def __imod__(self, o):
raise _UndefinedAttribute(self.value_)
def __ipow__(self, o):
raise _UndefinedAttribute(self.value_)
def __ifloordiv__(self, o):
raise _UndefinedAttribute(self.value_)
def __str__(self, *args, **kwargs):
raise _UndefinedAttribute(self.value_)
def __repr__(self, *args, **kwargs):
raise _UndefinedAttribute(self.value_)
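# Editor's note: illustrative usage, following directly from the class above.
# Every operation on an UndefinedAttribute funnels into the same AttributeError.
attr = UndefinedAttribute('index')
try:
    attr + 1
except AttributeError as err:
    print(err)  # Attempted to access undefined dataset attribute index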
| 22.145455
| 86
| 0.747947
| 288
| 2,436
| 5.677083
| 0.229167
| 0.165138
| 0.478899
| 0.567584
| 0.73211
| 0.710703
| 0.710703
| 0.119878
| 0
| 0
| 0
| 0
| 0.137931
| 2,436
| 109
| 87
| 22.348624
| 0.778571
| 0
| 0
| 0.414286
| 0
| 0
| 0.044628
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.457143
| false
| 0
| 0.014286
| 0.014286
| 0.528571
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0 | 6 | 48ef1378fbe74c9b26b5e5d01951ce3bf0893c95 | 38,045 | py | Python | greykite/tests/framework/templates/test_silverkite_multistage_template.py | kenzie-q/greykite | 9fb670b5325dd252759a490b31aa4d9494fdd68b | ["BSD-2-Clause"] | 1,503 | 2021-05-13T02:43:42.000Z | 2022-03-28T22:48:34.000Z | greykite/tests/framework/templates/test_silverkite_multistage_template.py | kenzie-q/greykite | 9fb670b5325dd252759a490b31aa4d9494fdd68b | ["BSD-2-Clause"] | 59 | 2021-05-15T07:23:18.000Z | 2022-03-31T18:52:00.000Z | greykite/tests/framework/templates/test_silverkite_multistage_template.py | kenzie-q/greykite | 9fb670b5325dd252759a490b31aa4d9494fdd68b | ["BSD-2-Clause"] | 67 | 2021-05-13T02:44:04.000Z | 2022-03-25T04:08:12.000Z |
import datetime
import numpy as np
import pytest
from testfixtures import LogCapture
from greykite.common.constants import LOGGER_NAME
from greykite.common.constants import PREDICTED_COL
from greykite.common.constants import PREDICTED_LOWER_COL
from greykite.common.constants import PREDICTED_UPPER_COL
from greykite.common.constants import TIME_COL
from greykite.common.constants import VALUE_COL
from greykite.common.testing_utils import generate_df_for_tests
from greykite.framework.templates.autogen.forecast_config import EvaluationPeriodParam
from greykite.framework.templates.autogen.forecast_config import ForecastConfig
from greykite.framework.templates.autogen.forecast_config import MetadataParam
from greykite.framework.templates.autogen.forecast_config import ModelComponentsParam
from greykite.framework.templates.forecaster import Forecaster
from greykite.framework.templates.silverkite_multistage_template import SilverkiteMultistageTemplate
from greykite.framework.templates.silverkite_multistage_template_config import SILVERKITE_TWO_STAGE
from greykite.framework.templates.silverkite_multistage_template_config import SilverkiteMultistageTemplateConfig
from greykite.framework.templates.simple_silverkite_template import SimpleSilverkiteTemplate
from greykite.sklearn.estimator.simple_silverkite_estimator import SimpleSilverkiteEstimator
from greykite.sklearn.uncertainty.uncertainty_methods import UncertaintyMethodEnum
@pytest.fixture
def df():
df = generate_df_for_tests(
freq="H",
periods=24 * 7 * 8,
train_start_date=datetime.datetime(2018, 1, 1),
conti_year_origin=2018)["df"]
df["regressor"] = np.arange(len(df))
return df
@pytest.fixture
def silverkite_multistage_configs():
configs = [
SilverkiteMultistageTemplateConfig(
train_length="30D",
fit_length=None,
agg_func="nanmean",
agg_freq="D",
model_template="SILVERKITE",
model_components=ModelComponentsParam(
seasonality={
"yearly_seasonality": 12,
"quarterly_seasonality": 5,
"monthly_seasonality": 5,
"weekly_seasonality": 4,
"daily_seasonality": 0,
},
growth={
"growth_term": "linear"
},
events={
"holidays_to_model_separately": "auto",
"holiday_lookup_countries": "auto",
"holiday_pre_num_days": 1,
"holiday_post_num_days": 1,
"holiday_pre_post_num_dict": None,
"daily_event_df_dict": None,
},
changepoints={
"changepoints_dict": None,
"seasonality_changepoints_dict": None
},
autoregression={
"autoreg_dict": "auto"
},
regressors={
"regressor_cols": []
},
lagged_regressors={
"lagged_regressor_dict": None
},
uncertainty={
"uncertainty_dict": None
},
custom={
"fit_algorithm_dict": {
"fit_algorithm": "ridge",
"fit_algorithm_params": None,
},
"feature_sets_enabled": "auto", # "auto" based on data freq and size
"max_daily_seas_interaction_order": 0,
"max_weekly_seas_interaction_order": 2,
"extra_pred_cols": [],
"min_admissible_value": None,
"max_admissible_value": None,
}
)
),
SilverkiteMultistageTemplateConfig(
train_length="7D",
fit_length=None,
agg_func="nanmean",
agg_freq=None,
model_template="SILVERKITE",
model_components=ModelComponentsParam(
seasonality={
"yearly_seasonality": 0,
"quarterly_seasonality": 0,
"monthly_seasonality": 0,
"weekly_seasonality": 0,
"daily_seasonality": 12,
},
growth={
"growth_term": None
},
events={
"holidays_to_model_separately": [],
"holiday_lookup_countries": [],
"holiday_pre_num_days": 0,
"holiday_post_num_days": 0,
"holiday_pre_post_num_dict": None,
"daily_event_df_dict": None,
},
changepoints={
"changepoints_dict": None,
"seasonality_changepoints_dict": None
},
autoregression={
"autoreg_dict": "auto"
},
regressors={
"regressor_cols": []
},
lagged_regressors={
"lagged_regressor_dict": None
},
uncertainty={
"uncertainty_dict": None
},
custom={
"fit_algorithm_dict": {
"fit_algorithm": "ridge",
"fit_algorithm_params": None,
},
"feature_sets_enabled": "auto", # "auto" based on data freq and size
"max_daily_seas_interaction_order": 5,
"max_weekly_seas_interaction_order": 2,
"extra_pred_cols": [],
"min_admissible_value": None,
"max_admissible_value": None,
}
)
)
]
return configs
@pytest.fixture
def forecast_config(silverkite_multistage_configs):
forecast_config = ForecastConfig(
model_template="SILVERKITE_TWO_STAGE",
forecast_horizon=12,
metadata_param=MetadataParam(
time_col=TIME_COL,
value_col=VALUE_COL,
freq="H"
),
model_components_param=ModelComponentsParam(
custom=dict(
silverkite_multistage_configs=silverkite_multistage_configs
)
),
evaluation_period_param=EvaluationPeriodParam(
cv_max_splits=1,
cv_horizon=12,
test_horizon=12
)
)
return forecast_config
def test_get_regressor_cols(df, forecast_config):
"""Tests the `self.get_regressor_cols` method."""
template = SilverkiteMultistageTemplate()
df["reg1"] = 1
df["reg2"] = 2
template.df = df
template.config = forecast_config
forecast_config.model_components_param.custom[
"silverkite_multistage_configs"][0].model_components.regressors["regressor_cols"] = ["reg1"]
forecast_config.model_components_param.custom[
"silverkite_multistage_configs"][1].model_components.regressors["regressor_cols"] = ["reg2"]
regressor_cols = template.get_regressor_cols()
assert set(regressor_cols) == {"reg1", "reg2"}
def test_get_lagged_regressor_info(df, forecast_config):
template = SilverkiteMultistageTemplate()
df["reg1"] = 1
df["reg2"] = 2
template.df = df
template.config = forecast_config
forecast_config.model_components_param.custom[
"silverkite_multistage_configs"][0].model_components.lagged_regressors["lagged_regressor_dict"] = [{
"reg1": {
"lag_dict": {"orders": [12]},
"series_na_fill_func": lambda s: s.bfill().ffill()}
}]
forecast_config.model_components_param.custom[
"silverkite_multistage_configs"][1].model_components.lagged_regressors["lagged_regressor_dict"] = [{
"reg2": {
"lag_dict": {"orders": [12]},
"series_na_fill_func": lambda s: s.bfill().ffill()}
}]
lagged_regressor_info = template.get_lagged_regressor_info()
assert lagged_regressor_info == dict(
lagged_regressor_cols=["reg1", "reg2"],
overall_min_lag_order=12.0,
overall_max_lag_order=12.0
)
def test_get_hyperparameter_grid(df, forecast_config):
template = SilverkiteMultistageTemplate()
# Error when `self.config` is not available.
with pytest.raises(
ValueError,
match="Forecast config must be provided"):
template.get_hyperparameter_grid()
template.df = df
# Adds a list of length 2 to each submodel.
# The result hyperparameter grid should have 2 * 2 = 4 grids.
forecast_config.model_components_param.custom[
"silverkite_multistage_configs"][0].model_components.seasonality["weekly_seasonality"] = [1, 2]
forecast_config.model_components_param.custom[
"silverkite_multistage_configs"][1].model_components.seasonality["daily_seasonality"] = [10, 12]
template.config = forecast_config
hyperparameter_grid = template.get_hyperparameter_grid()
assert hyperparameter_grid["estimator__forecast_horizon"] == [12]
assert hyperparameter_grid["estimator__freq"] == ["H"]
assert len(hyperparameter_grid["estimator__model_configs"]) == 4
assert hyperparameter_grid["estimator__model_configs"][0][0].estimator_params["weekly_seasonality"] == 1
assert hyperparameter_grid["estimator__model_configs"][0][1].estimator_params["daily_seasonality"] == 10
assert hyperparameter_grid["estimator__model_configs"][1][0].estimator_params["weekly_seasonality"] == 1
assert hyperparameter_grid["estimator__model_configs"][1][1].estimator_params["daily_seasonality"] == 12
assert hyperparameter_grid["estimator__model_configs"][2][0].estimator_params["weekly_seasonality"] == 2
assert hyperparameter_grid["estimator__model_configs"][2][1].estimator_params["daily_seasonality"] == 10
assert hyperparameter_grid["estimator__model_configs"][3][0].estimator_params["weekly_seasonality"] == 2
assert hyperparameter_grid["estimator__model_configs"][3][1].estimator_params["daily_seasonality"] == 12
def test_get_hyperparameter_grid_same_template(df, forecast_config):
# Tests the behavior of using the same ``model_template`` to override.
template = SilverkiteMultistageTemplate()
template.df = df
# Sets weekly seasonality to 5.
forecast_config.model_components_param.custom[
"silverkite_multistage_configs"][1].model_components.seasonality["weekly_seasonality"] = 5
# Removes the daily seasonality specification.
del forecast_config.model_components_param.custom[
"silverkite_multistage_configs"][1].model_components.seasonality["daily_seasonality"]
template.config = forecast_config
hyperparameter_grid = template.get_hyperparameter_grid()
# The original template has daily seasonality 12 and no weekly seasonality.
    # The second model was overridden with the same ``model_template``, which is ``SILVERKITE``,
# so the hyperparameter_grid should have both daily seasonality 12 and weekly seasonality 5.
assert hyperparameter_grid["estimator__model_configs"][0][1].estimator_params["daily_seasonality"] == 12
assert hyperparameter_grid["estimator__model_configs"][0][1].estimator_params["weekly_seasonality"] == 5
def test_get_hyperparameter_grid_different_template(df, forecast_config):
    # Tests the behavior of using a different ``model_template`` to override.
template = SilverkiteMultistageTemplate()
template.df = df
# Sets the model template to be ``SILVERKITE_EMPTY``.
forecast_config.model_components_param.custom[
"silverkite_multistage_configs"][1].model_template = "SILVERKITE_EMPTY"
# Sets weekly seasonality to 5.
forecast_config.model_components_param.custom[
"silverkite_multistage_configs"][1].model_components.seasonality["weekly_seasonality"] = 5
# Removes the daily seasonality specification.
del forecast_config.model_components_param.custom[
"silverkite_multistage_configs"][1].model_components.seasonality["daily_seasonality"]
template.config = forecast_config
hyperparameter_grid = template.get_hyperparameter_grid()
# The original template has daily seasonality 12 and no weekly seasonality.
    # The second model was overridden with a different ``model_template``, which is ``SILVERKITE_EMPTY``,
# so the hyperparameter_grid should have only weekly seasonality 5 and daily seasonality 0.
assert hyperparameter_grid["estimator__model_configs"][0][1].estimator_params["daily_seasonality"] == 0
assert hyperparameter_grid["estimator__model_configs"][0][1].estimator_params["weekly_seasonality"] == 5
def test_get_hyperparameter_grid_extra_configs(df, forecast_config):
"""Tests gets hyperparameter grid when the default and override have different lengths."""
# The empty template has no configs.
# The override components has two configs.
forecast_config.model_template = "SILVERKITE_MULTISTAGE_EMPTY"
template = SilverkiteMultistageTemplate()
template.df = df
template.config = forecast_config
# The grid should have exactly two configs which are the same as the override configs.
hyperparameter_grid = template.get_hyperparameter_grid()
assert hyperparameter_grid["estimator__model_configs"][0][0].estimator_params == {
'yearly_seasonality': 12,
'quarterly_seasonality': 5,
'monthly_seasonality': 5,
'weekly_seasonality': 4,
'daily_seasonality': 0,
'growth_term': 'linear',
'changepoints_dict': None,
'seasonality_changepoints_dict': None,
'holidays_to_model_separately': 'auto',
'holiday_lookup_countries': 'auto',
'holiday_pre_num_days': 1,
'holiday_post_num_days': 1,
'holiday_pre_post_num_dict': None,
'daily_event_df_dict': None,
'feature_sets_enabled': 'auto',
'fit_algorithm_dict': {
'fit_algorithm': 'ridge',
'fit_algorithm_params': None},
'max_daily_seas_interaction_order': 0,
'max_weekly_seas_interaction_order': 2,
'extra_pred_cols': [],
'drop_pred_cols': None,
'explicit_pred_cols': None,
'min_admissible_value': None,
'max_admissible_value': None,
'autoreg_dict': 'auto',
'simulation_num': 10,
'normalize_method': None,
'regressor_cols': [],
'lagged_regressor_dict': None,
'regression_weight_col': None,
'uncertainty_dict': None,
'origin_for_time_vars': None,
'train_test_thresh': None,
'training_fraction': None}
assert hyperparameter_grid["estimator__model_configs"][0][1].estimator_params == {
'yearly_seasonality': 0,
'quarterly_seasonality': 0,
'monthly_seasonality': 0,
'weekly_seasonality': 0,
'daily_seasonality': 12,
'growth_term': None,
'changepoints_dict': None,
'seasonality_changepoints_dict': None,
'holidays_to_model_separately': [],
'holiday_lookup_countries': [],
'holiday_pre_num_days': 0,
'holiday_post_num_days': 0,
'holiday_pre_post_num_dict': None,
'daily_event_df_dict': None,
'feature_sets_enabled': 'auto',
'fit_algorithm_dict': {
'fit_algorithm': 'ridge',
'fit_algorithm_params': None},
'max_daily_seas_interaction_order': 5,
'max_weekly_seas_interaction_order': 2,
'extra_pred_cols': [],
'drop_pred_cols': None,
'explicit_pred_cols': None,
'min_admissible_value': None,
'max_admissible_value': None,
'normalize_method': None,
'autoreg_dict': 'auto',
'simulation_num': 10,
'regressor_cols': [],
'lagged_regressor_dict': None,
'regression_weight_col': None,
'uncertainty_dict': None,
'origin_for_time_vars': None,
'train_test_thresh': None,
'training_fraction': None}
def test_get_silverkite_multistage_configs_override(df, forecast_config):
template = SilverkiteMultistageTemplate()
template.df = df
# Adds a list of length 2 to each submodel.
# The result hyperparameter grid should have 2 * 2 = 4 grids.
forecast_config.model_components_param.custom[
"silverkite_multistage_configs"][0].model_components.seasonality["weekly_seasonality"] = [1, 2]
forecast_config.model_components_param.custom[
"silverkite_multistage_configs"][1].model_components.seasonality["daily_seasonality"] = [10, 12]
template.config = forecast_config
default_model_components = template._SilverkiteMultistageTemplate__get_default_model_components(
forecast_config.model_template)
default_silverkite_multistage_configs = default_model_components.custom.get("silverkite_multistage_configs")
new_configs = template._SilverkiteMultistageTemplate__get_silverkite_multistage_configs_override(
custom=forecast_config.model_components_param.custom,
model_template="SILVERKITE_TWO_STAGE",
default_silverkite_multistage_configs=default_silverkite_multistage_configs
)
assert new_configs == [
SilverkiteMultistageTemplateConfig(
train_length='30D',
fit_length=None,
agg_func='nanmean',
agg_freq='D',
model_template='SILVERKITE',
model_components=ModelComponentsParam(
autoregression={
'autoreg_dict': 'auto'
},
changepoints={
'changepoints_dict': None,
'seasonality_changepoints_dict': None
},
custom={
'fit_algorithm_dict': {
'fit_algorithm': 'ridge',
'fit_algorithm_params': None
},
'feature_sets_enabled': 'auto',
'max_daily_seas_interaction_order': 0,
'max_weekly_seas_interaction_order': 2,
'extra_pred_cols': [],
'min_admissible_value': None,
'max_admissible_value': None
},
events={
'holidays_to_model_separately': 'auto',
'holiday_lookup_countries': 'auto',
'holiday_pre_num_days': 1,
'holiday_post_num_days': 1,
'holiday_pre_post_num_dict': None,
'daily_event_df_dict': None
},
growth={
'growth_term': 'linear'
},
hyperparameter_override={},
regressors={
'regressor_cols': []
},
lagged_regressors={
'lagged_regressor_dict': None
},
seasonality={
'yearly_seasonality': 12,
'quarterly_seasonality': 5,
'monthly_seasonality': 5,
'weekly_seasonality': [1, 2],
'daily_seasonality': 0},
uncertainty={
'uncertainty_dict': None
})),
SilverkiteMultistageTemplateConfig(
train_length='7D',
fit_length=None,
agg_func='nanmean',
agg_freq=None,
model_template='SILVERKITE',
model_components=ModelComponentsParam(
autoregression={
'autoreg_dict': 'auto'
},
changepoints={
'changepoints_dict': None,
'seasonality_changepoints_dict': None
},
custom={
'fit_algorithm_dict': {
'fit_algorithm': 'ridge',
'fit_algorithm_params': None
},
'feature_sets_enabled': 'auto',
'max_daily_seas_interaction_order': 5,
'max_weekly_seas_interaction_order': 2,
'extra_pred_cols': [],
'min_admissible_value': None,
'max_admissible_value': None
},
events={
'holidays_to_model_separately': [],
'holiday_lookup_countries': [],
'holiday_pre_num_days': 0,
'holiday_post_num_days': 0,
'holiday_pre_post_num_dict': None,
'daily_event_df_dict': None
},
growth={
'growth_term': None
},
hyperparameter_override={},
regressors={
'regressor_cols': []
},
lagged_regressors={
'lagged_regressor_dict': None
},
seasonality={
'yearly_seasonality': 0,
'quarterly_seasonality': 0,
'monthly_seasonality': 0,
'weekly_seasonality': 0,
'daily_seasonality': [10, 12]
},
uncertainty={
'uncertainty_dict': None
}))]
def test_get_estimators_and_params_from_template_configs(df, forecast_config):
template = SilverkiteMultistageTemplate()
template.df = df
# Adds a list of length 2 to each submodel.
# The result hyperparameter grid should have 2 * 2 = 4 grids.
forecast_config.model_components_param.custom[
"silverkite_multistage_configs"][0].model_components.seasonality["weekly_seasonality"] = [1, 2]
forecast_config.model_components_param.custom[
"silverkite_multistage_configs"][1].model_components.seasonality["daily_seasonality"] = [10, 12]
template.config = forecast_config
default_model_components = template._SilverkiteMultistageTemplate__get_default_model_components(
forecast_config.model_template)
default_silverkite_multistage_configs = default_model_components.custom.get("silverkite_multistage_configs")
new_configs = template._SilverkiteMultistageTemplate__get_silverkite_multistage_configs_override(
custom=forecast_config.model_components_param.custom,
model_template="SILVERKITE_TWO_STAGE",
default_silverkite_multistage_configs=default_silverkite_multistage_configs
)
estimator_list, estimator_params_list = template._SilverkiteMultistageTemplate__get_estimators_and_params_from_template_configs(
new_configs=new_configs
)
    # We can't test ``time_properties``, so drop it before comparing.
for d in estimator_params_list:
del d["estimator__time_properties"]
assert estimator_list == [SimpleSilverkiteEstimator, SimpleSilverkiteEstimator]
assert estimator_params_list == [
{
'estimator__yearly_seasonality': [12],
'estimator__quarterly_seasonality': [5],
'estimator__monthly_seasonality': [5],
'estimator__weekly_seasonality': [1, 2],
'estimator__daily_seasonality': [0],
'estimator__growth_term': ['linear'],
'estimator__changepoints_dict': [None],
'estimator__seasonality_changepoints_dict': [None],
'estimator__holidays_to_model_separately': ['auto'],
'estimator__holiday_lookup_countries': ['auto'],
'estimator__holiday_pre_num_days': [1],
'estimator__holiday_post_num_days': [1],
'estimator__holiday_pre_post_num_dict': [None],
'estimator__daily_event_df_dict': [None],
'estimator__feature_sets_enabled': ['auto'],
'estimator__fit_algorithm_dict': [{
'fit_algorithm': 'ridge',
'fit_algorithm_params': None}],
'estimator__max_daily_seas_interaction_order': [0],
'estimator__max_weekly_seas_interaction_order': [2],
'estimator__extra_pred_cols': [[]],
'estimator__drop_pred_cols': [None],
'estimator__explicit_pred_cols': [None],
'estimator__min_admissible_value': [None],
'estimator__max_admissible_value': [None],
'estimator__normalize_method': [None],
'estimator__autoreg_dict': ['auto'],
'estimator__simulation_num': [10],
'estimator__regressor_cols': [[]],
'estimator__lagged_regressor_dict': [None],
'estimator__regression_weight_col': [None],
'estimator__uncertainty_dict': [None],
'estimator__origin_for_time_vars': [None],
'estimator__train_test_thresh': [None],
'estimator__training_fraction': [None]
},
{
'estimator__yearly_seasonality': [0],
'estimator__quarterly_seasonality': [0],
'estimator__monthly_seasonality': [0],
'estimator__weekly_seasonality': [0],
'estimator__daily_seasonality': [10, 12],
'estimator__growth_term': [None],
'estimator__changepoints_dict': [None],
'estimator__seasonality_changepoints_dict': [None],
'estimator__holidays_to_model_separately': [[]],
'estimator__holiday_lookup_countries': [[]],
'estimator__holiday_pre_num_days': [0],
'estimator__holiday_post_num_days': [0],
'estimator__holiday_pre_post_num_dict': [None],
'estimator__daily_event_df_dict': [None],
'estimator__feature_sets_enabled': ['auto'],
'estimator__fit_algorithm_dict': [{
'fit_algorithm': 'ridge',
'fit_algorithm_params': None}],
'estimator__max_daily_seas_interaction_order': [5],
'estimator__max_weekly_seas_interaction_order': [2],
'estimator__extra_pred_cols': [[]],
'estimator__drop_pred_cols': [None],
'estimator__explicit_pred_cols': [None],
'estimator__min_admissible_value': [None],
'estimator__max_admissible_value': [None],
'estimator__normalize_method': [None],
'estimator__autoreg_dict': ['auto'],
'estimator__simulation_num': [10],
'estimator__regressor_cols': [[]],
'estimator__lagged_regressor_dict': [None],
'estimator__regression_weight_col': [None],
'estimator__uncertainty_dict': [None],
'estimator__origin_for_time_vars': [None],
'estimator__train_test_thresh': [None],
'estimator__training_fraction': [None]
}]
def test_flatten_estimator_params_list():
template = SilverkiteMultistageTemplate()
x = [{
"estimator__a": [1],
"estimator__b": [2, 3]
}, {
"estimator__c": [4, 5]
}]
flattened_params = template._SilverkiteMultistageTemplate__flatten_estimator_params_list(
estimator_params_list=x
)
assert flattened_params == [
[{'a': 1, 'b': 2}, {'c': 4}],
[{'a': 1, 'b': 2}, {'c': 5}],
[{'a': 1, 'b': 3}, {'c': 4}],
[{'a': 1, 'b': 3}, {'c': 5}]
]
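# Editor's note: a standalone sketch (hypothetical helper, not the template's
# private method) of the same flattening: a cartesian product over every
# list-valued parameter of every submodel, reproducing the expected output above.
from itertools import product

def flatten_params_sketch(estimator_params_list):
    per_model = []
    for params in estimator_params_list:
        keys = [k.replace("estimator__", "") for k in params]
        per_model.append(
            [dict(zip(keys, values)) for values in product(*params.values())]
        )
    return [list(combo) for combo in product(*per_model)]

x = [{"estimator__a": [1], "estimator__b": [2, 3]}, {"estimator__c": [4, 5]}]
assert flatten_params_sketch(x) == [
    [{'a': 1, 'b': 2}, {'c': 4}],
    [{'a': 1, 'b': 2}, {'c': 5}],
    [{'a': 1, 'b': 3}, {'c': 4}],
    [{'a': 1, 'b': 3}, {'c': 5}],
]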
def test_silverkite_multistage_model_template(df, forecast_config):
forecaster = Forecaster()
forecast_result = forecaster.run_forecast_config(
df=df,
config=forecast_config
)
assert forecast_result.backtest is not None
assert forecast_result.grid_search is not None
assert forecast_result.forecast is not None
assert len(forecast_result.model[-1].models) == 2
# Checks the forecast horizons in each model.
assert forecast_result.model[-1].models[0].forecast_horizon == 1 # daily model
assert forecast_result.model[-1].models[1].forecast_horizon == 12 # hourly model
# Checks the autoregression orders are as expected.
assert "y_lag1" in forecast_result.model[-1].models[0].model_dict["x_mat"].columns
assert "y_lag12" in forecast_result.model[-1].models[1].model_dict["x_mat"].columns
    # Checks the forecast is not NaN
assert len(forecast_result.forecast.df_test[PREDICTED_COL].dropna()) == len(forecast_result.forecast.df_test)
assert len(forecast_result.backtest.df_test[PREDICTED_COL].dropna()) == len(forecast_result.backtest.df_test)
def test_silverkite_multistage_model_template_with_regressor(df, forecast_config):
forecaster = Forecaster()
forecast_config.model_components_param.custom[
"silverkite_multistage_configs"][0].model_components.regressors["regressor_cols"] = ["regressor"]
df.iloc[-12:, 1] = np.nan
forecast_result = forecaster.run_forecast_config(
df=df,
config=forecast_config
)
assert forecast_result.backtest is not None
assert forecast_result.grid_search is not None
assert forecast_result.forecast is not None
assert len(forecast_result.model[-1].models) == 2
# Checks the forecast horizons in each model.
assert forecast_result.model[-1].models[0].forecast_horizon == 1 # daily model
assert forecast_result.model[-1].models[1].forecast_horizon == 12 # hourly model
# Checks the autoregression orders are as expected.
assert "y_lag1" in forecast_result.model[-1].models[0].model_dict["x_mat"].columns
assert "y_lag12" in forecast_result.model[-1].models[1].model_dict["x_mat"].columns
# Checks that the regressor column is included.
assert "regressor" in forecast_result.model[-1].models[0].model_dict["x_mat"].columns
    # Checks the forecast is not NaN
assert len(forecast_result.forecast.df_test[PREDICTED_COL].dropna()) == len(forecast_result.forecast.df_test)
assert len(forecast_result.backtest.df_test[PREDICTED_COL].dropna()) == len(forecast_result.backtest.df_test)
def test_silverkite_multistage_model_template_with_lagged_regressor(df, forecast_config):
forecaster = Forecaster()
forecast_config.model_components_param.custom[
"silverkite_multistage_configs"][0].model_components.lagged_regressors["lagged_regressor_dict"] = [{
"regressor": {
"lag_dict": {"orders": [12]},
"series_na_fill_func": lambda s: s.bfill().ffill()}
}]
forecast_result = forecaster.run_forecast_config(
df=df,
config=forecast_config
)
assert forecast_result.backtest is not None
assert forecast_result.grid_search is not None
assert forecast_result.forecast is not None
assert len(forecast_result.model[-1].models) == 2
# Checks the forecast horizons in each model.
assert forecast_result.model[-1].models[0].forecast_horizon == 1 # daily model
assert forecast_result.model[-1].models[1].forecast_horizon == 12 # hourly model
# Checks the autoregression orders are as expected.
assert "y_lag1" in forecast_result.model[-1].models[0].model_dict["x_mat"].columns
assert "y_lag12" in forecast_result.model[-1].models[1].model_dict["x_mat"].columns
# Checks that the regressor column is included.
assert "regressor_lag12" in forecast_result.model[-1].models[0].model_dict["x_mat"].columns
    # Checks the forecast is not NaN
assert len(forecast_result.forecast.df_test[PREDICTED_COL].dropna()) == len(forecast_result.forecast.df_test)
assert len(forecast_result.backtest.df_test[PREDICTED_COL].dropna()) == len(forecast_result.backtest.df_test)
def test_errors(df, forecast_config):
# No configs with SILVERKITE_MULTISTAGE_EMPTY.
template = SilverkiteMultistageTemplate()
template.df = df
forecast_config.model_components_param.custom["silverkite_multistage_configs"] = None
forecast_config.model_template = "SILVERKITE_MULTISTAGE_EMPTY"
template.config = forecast_config
with pytest.raises(
ValueError,
match="``SILVERKITE_MULTISTAGE_EMPTY`` can not be used without over"):
template.get_hyperparameter_grid()
# The config has wrong type.
template = SilverkiteMultistageTemplate()
template.df = df
forecast_config.model_components_param.custom["silverkite_multistage_configs"] = 5
forecast_config.model_template = "SILVERKITE_TWO_STAGE"
template.config = forecast_config
with pytest.raises(
ValueError,
match="The ``silverkite_multistage_configs`` parameter must be a list of"):
template.get_hyperparameter_grid()
def test_get_default_model_components():
template = SilverkiteMultistageTemplate()
assert template._SilverkiteMultistageTemplate__get_default_model_components(
"SILVERKITE_TWO_STAGE") == SILVERKITE_TWO_STAGE
with pytest.raises(
ValueError,
match="The template name "):
template._SilverkiteMultistageTemplate__get_default_model_components("some_template")
def test_get_template_class():
template = SilverkiteMultistageTemplate()
assert template._SilverkiteMultistageTemplate__get_template_class(
ForecastConfig(model_template="SILVERKITE")
) == SimpleSilverkiteTemplate
with pytest.raises(
ValueError,
match="Currently Silverkite Multistage only supports"):
template._SilverkiteMultistageTemplate__get_template_class(
ForecastConfig(model_template="DAILY_CP_NONE")
)
def test_uncertainty(df, forecast_config):
"""Tests the uncertainty methods."""
# Tests no coverage and no uncertainty, there is no uncertainty.
forecaster = Forecaster()
forecast_result = forecaster.run_forecast_config(
df=df,
config=forecast_config
)
assert PREDICTED_LOWER_COL not in forecast_result.backtest.df_test
assert PREDICTED_LOWER_COL not in forecast_result.forecast.df_test
# Tests coverage and no uncertainty, there is uncertainty.
forecast_config.coverage = 0.99
forecaster = Forecaster()
forecast_result = forecaster.run_forecast_config(
df=df,
config=forecast_config
)
assert PREDICTED_LOWER_COL in forecast_result.backtest.df_test
assert PREDICTED_LOWER_COL in forecast_result.forecast.df_test
assert forecast_result.model[-1].coverage == 0.99
# Default method is used when coverage is given but ``uncertainty_dict`` is not given.
assert (forecast_result.model[-1].uncertainty_model.UNCERTAINTY_METHOD
== UncertaintyMethodEnum.simple_conditional_residuals.name)
last_interval_width_99 = (forecast_result.forecast.df[PREDICTED_UPPER_COL].iloc[-1]
- forecast_result.forecast.df[PREDICTED_LOWER_COL].iloc[-1])
# Tests coverage and uncertainty, there is uncertainty.
forecast_config.model_components_param.uncertainty = dict(
uncertainty_dict=dict(
uncertainty_method=UncertaintyMethodEnum.simple_conditional_residuals.name,
params=dict(
conditional_cols=["dow"]
)
)
)
forecaster = Forecaster()
forecast_result = forecaster.run_forecast_config(
df=df,
config=forecast_config
)
assert PREDICTED_LOWER_COL in forecast_result.backtest.df_test
assert PREDICTED_LOWER_COL in forecast_result.forecast.df_test
assert forecast_result.model[-1].coverage == 0.99
# The last 2 days intervals should have different lengths due to conditioning on "dow".
last_day_interval_width_99 = (forecast_result.forecast.df[PREDICTED_UPPER_COL].iloc[-1]
- forecast_result.forecast.df[PREDICTED_LOWER_COL].iloc[-1])
second_last_day_interval_width_99 = (forecast_result.forecast.df[PREDICTED_UPPER_COL].iloc[-25]
- forecast_result.forecast.df[PREDICTED_LOWER_COL].iloc[-25])
assert last_day_interval_width_99 != second_last_day_interval_width_99
# Tests 95% coverage has narrower interval.
forecast_config.coverage = 0.95
forecast_config.model_components_param.uncertainty = dict(
uncertainty_dict=dict(
uncertainty_method=UncertaintyMethodEnum.simple_conditional_residuals.name,
params=dict()
)
)
forecaster = Forecaster()
forecast_result = forecaster.run_forecast_config(
df=df,
config=forecast_config
)
assert PREDICTED_LOWER_COL in forecast_result.backtest.df_test
assert PREDICTED_LOWER_COL in forecast_result.forecast.df_test
assert forecast_result.model[-1].coverage == 0.95
# 95 interval is narrower than 99 interval.
last_interval_width_95 = (forecast_result.forecast.df[PREDICTED_UPPER_COL].iloc[-1]
- forecast_result.forecast.df[PREDICTED_LOWER_COL].iloc[-1])
assert last_interval_width_99 > last_interval_width_95
def test_uncertainty_fail(df, forecast_config):
"""Tests the pipeline won't fail when uncertainty fails."""
with LogCapture(LOGGER_NAME) as log_capture:
forecast_config.coverage = 0.95
forecast_config.model_components_param.uncertainty = dict(
uncertainty_dict=dict(
uncertainty_method=UncertaintyMethodEnum.simple_conditional_residuals.name,
params=dict(
conditional_cols=["dowww"]
)
)
)
forecaster = Forecaster()
forecast_result = forecaster.run_forecast_config(
df=df,
config=forecast_config
)
# The forecast is still generated.
assert forecast_result.forecast is not None
assert (LOGGER_NAME,
"WARNING",
"The following errors occurred during fitting the uncertainty model, "
"the uncertainty model is skipped. "
"The following conditional columns are not found in `train_df`: ['dowww'].") in log_capture.actual()
| 43.931871
| 132
| 0.645104
| 3,866
| 38,045
| 5.960942
| 0.078376
| 0.048601
| 0.041007
| 0.030202
| 0.824474
| 0.784378
| 0.754914
| 0.735995
| 0.706791
| 0.695596
| 0
| 0.013787
| 0.26219
| 38,045
| 865
| 133
| 43.982659
| 0.807196
| 0.07273
| 0
| 0.615797
| 0
| 0
| 0.236188
| 0.13251
| 0
| 0
| 0
| 0
| 0.097724
| 1
| 0.026774
| false
| 0
| 0.029451
| 0
| 0.060241
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0 | 6 | 48f29fca4dd3704e67362a86a0ff19caa204fa8e | 57 | py | Python | Pi/Python_Code/1_helloworld.py | erickmusembi/Robot-Project | ed22b342fae2fb7f89f9e933f6052ae3cdc0c982 | ["MIT"] | 1 | 2016-07-26T05:58:26.000Z | 2016-07-26T05:58:26.000Z | Pi/Python_Code/1_helloworld.py | erickmusembi/Robot-Project | ed22b342fae2fb7f89f9e933f6052ae3cdc0c982 | ["MIT"] | null | null | null | Pi/Python_Code/1_helloworld.py | erickmusembi/Robot-Project | ed22b342fae2fb7f89f9e933f6052ae3cdc0c982 | ["MIT"] | null | null | null |
#!/usr/bin/python
# Print Hello world (the print() call form works under both Python 2 and 3)
print("Hello World!")
| 19
| 20
| 0.736842
| 9
| 57
| 4.666667
| 0.666667
| 0.47619
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 57
| 3
| 20
| 19
| 0.823529
| 0.578947
| 0
| 0
| 0
| 0
| 0.521739
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 1
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0 | 6 | 5b0be51a21049576f99818d53b6ebdcbb0dcc832 | 6,584 | py | Python | tests/test_mmotifs.py | SaVoAMP/stumpy | d63963caaf6a8b64448953f638c1d3345e05a36a | ["BSD-3-Clause"] | null | null | null | tests/test_mmotifs.py | SaVoAMP/stumpy | d63963caaf6a8b64448953f638c1d3345e05a36a | ["BSD-3-Clause"] | null | null | null | tests/test_mmotifs.py | SaVoAMP/stumpy | d63963caaf6a8b64448953f638c1d3345e05a36a | ["BSD-3-Clause"] | null | null | null |
import numpy as np
import numpy.testing as npt
import naive
import pytest
from stumpy import config, mmotifs
test_data = [
np.array(
[
[5.2, 0.1, 3.5, 3.4, 7.1, 9.8, 3.7, 5.0, 2.1, 4.3, 7.5, 6.8, 8.0, 8.1, 1.2],
[
7.3,
3.2,
5.0,
9.1,
8.2,
7.3,
4.8,
8.2,
10.0,
0.0,
4.1,
3.2,
2.3,
0.1,
1.4,
],
[6.2, 7.6, 7.6, 8.4, 1.1, 5.9, 9.2, 8.5, 9.3, 4.6, 3.5, 0.0, 3.1, 5.3, 0.9],
[
0.1,
1.3,
3.0,
2.1,
6.2,
1.3,
9.5,
10.0,
1.8,
2.0,
2.1,
5.2,
1.3,
0.5,
4.3,
],
]
)
]
@pytest.mark.parametrize("T", test_data)
def test_mmotifs_with_default_parameters(T):
motif_distances_ref = np.array([[0.0000000e00, 1.1151008e-07]])
motif_indices_ref = np.array([[2, 9]])
motif_subspaces_ref = [np.array([1])]
motif_mdls_ref = [np.array([232.0, 250.57542476, 260.0, 271.3509059])]
m = 4
excl_zone = int(np.ceil(m / config.STUMPY_EXCL_ZONE_DENOM))
P, I = naive.mstump(T, m, excl_zone)
(
motif_distances_cmp,
motif_indices_cmp,
motif_subspaces_cmp,
motif_mdls_cmp,
) = mmotifs(T, P, I)
npt.assert_array_almost_equal(motif_distances_ref, motif_distances_cmp)
npt.assert_array_almost_equal(motif_indices_ref, motif_indices_cmp)
npt.assert_array_almost_equal(motif_subspaces_ref, motif_subspaces_cmp)
npt.assert_array_almost_equal(motif_mdls_ref, motif_mdls_cmp)
@pytest.mark.parametrize("T", test_data)
def test_mmotifs_max_matches_none(T):
motif_distances_ref = np.array([[0.0000000e00, 1.1151008e-07]])
motif_indices_ref = np.array([[2, 9]])
motif_subspaces_ref = [np.array([1])]
motif_mdls_ref = [np.array([232.0, 250.57542476, 260.0, 271.3509059])]
m = 4
excl_zone = int(np.ceil(m / config.STUMPY_EXCL_ZONE_DENOM))
P, I = naive.mstump(T, m, excl_zone)
(
motif_distances_cmp,
motif_indices_cmp,
motif_subspaces_cmp,
motif_mdls_cmp,
) = mmotifs(T, P, I, max_matches=None)
npt.assert_array_almost_equal(motif_distances_ref, motif_distances_cmp)
npt.assert_array_almost_equal(motif_indices_ref, motif_indices_cmp)
npt.assert_array_almost_equal(motif_subspaces_ref, motif_subspaces_cmp)
npt.assert_array_almost_equal(motif_mdls_ref, motif_mdls_cmp)
@pytest.mark.parametrize("T", test_data)
def test_mmotifs_more_motifs_when_cutoffs_3(T):
motif_distances_ref = np.array([[0.0000000e00, 1.1151008e-07]])
motif_indices_ref = np.array([[2, 9]])
motif_subspaces_ref = [np.array([1])]
motif_mdls_ref = [np.array([232.0, 250.57542476, 260.0, 271.3509059])]
m = 4
excl_zone = int(np.ceil(m / config.STUMPY_EXCL_ZONE_DENOM))
P, I = naive.mstump(T, m, excl_zone)
(
motif_distances_cmp,
motif_indices_cmp,
motif_subspaces_cmp,
motif_mdls_cmp,
) = mmotifs(T, P, I, cutoffs=3, max_motifs=10)
npt.assert_array_almost_equal(motif_distances_ref, motif_distances_cmp)
npt.assert_array_almost_equal(motif_indices_ref, motif_indices_cmp)
npt.assert_array_almost_equal(motif_subspaces_ref, motif_subspaces_cmp)
npt.assert_array_almost_equal(motif_mdls_ref, motif_mdls_cmp)
@pytest.mark.parametrize("T", test_data)
def test_mmotifs_more_motifs_cutoffs_is_list(T):
motif_distances_ref = np.array([[0.0000000e00, 1.1151008e-07]])
motif_indices_ref = np.array([[2, 9]])
motif_subspaces_ref = [np.array([1])]
motif_mdls_ref = [np.array([232.0, 250.57542476, 260.0, 271.3509059])]
m = 4
cutoffs = [2, 3, 4, 5]
excl_zone = int(np.ceil(m / config.STUMPY_EXCL_ZONE_DENOM))
P, I = naive.mstump(T, m, excl_zone)
(
motif_distances_cmp,
motif_indices_cmp,
motif_subspaces_cmp,
motif_mdls_cmp,
) = mmotifs(T, P, I, cutoffs=cutoffs, max_motifs=10)
npt.assert_array_almost_equal(motif_distances_ref, motif_distances_cmp)
npt.assert_array_almost_equal(motif_indices_ref, motif_indices_cmp)
npt.assert_array_almost_equal(motif_subspaces_ref, motif_subspaces_cmp)
npt.assert_array_almost_equal(motif_mdls_ref, motif_mdls_cmp)
@pytest.mark.parametrize("T", test_data)
def test_mmotifs_max_matches_2_k_1(T):
motif_distances_ref = np.array([[0.0, 0.20948156]])
motif_indices_ref = np.array([[2, 9]])
motif_subspaces_ref = [np.array([1, 3])]
motif_mdls_ref = [np.array([232.0, 250.57542476, 260.0, 271.3509059])]
m = 4
excl_zone = int(np.ceil(m / config.STUMPY_EXCL_ZONE_DENOM))
P, I = naive.mstump(T, m, excl_zone)
(
motif_distances_cmp,
motif_indices_cmp,
motif_subspaces_cmp,
motif_mdls_cmp,
) = mmotifs(T, P, I, max_distance=np.inf, max_matches=2, k=1)
npt.assert_array_almost_equal(motif_distances_ref, motif_distances_cmp)
npt.assert_array_almost_equal(motif_indices_ref, motif_indices_cmp)
npt.assert_array_almost_equal(motif_subspaces_ref, motif_subspaces_cmp)
npt.assert_array_almost_equal(motif_mdls_ref, motif_mdls_cmp)
@pytest.mark.parametrize("T", test_data)
def test_mmotifs_two_motif_pairs_max_motifs_2(T):
motif_distances_ref = np.array(
[[0.00000000e00, 1.11510080e-07], [1.68587394e-07, 2.58694429e-01]]
)
motif_indices_ref = np.array([[2, 9], [6, 1]])
motif_subspaces_ref = [np.array([1]), np.array([2])]
motif_mdls_ref = [
np.array([232.0, 250.57542476, 260.0, 271.3509059]),
np.array([264.0, 280.0, 299.01955001, 310.51024953]),
]
m = 4
excl_zone = int(np.ceil(m / config.STUMPY_EXCL_ZONE_DENOM))
P, I = naive.mstump(T, m, excl_zone)
(
motif_distances_cmp,
motif_indices_cmp,
motif_subspaces_cmp,
motif_mdls_cmp,
) = mmotifs(
T, P, I, cutoffs=np.inf, max_motifs=2, max_distance=np.inf, max_matches=2
)
npt.assert_array_almost_equal(motif_distances_ref, motif_distances_cmp)
npt.assert_array_almost_equal(motif_indices_ref, motif_indices_cmp)
npt.assert_array_almost_equal(motif_subspaces_ref, motif_subspaces_cmp)
npt.assert_array_almost_equal(motif_mdls_ref, motif_mdls_cmp)
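# Editor's note: a usage sketch of the public API these tests exercise
# (assuming the documented stumpy entry points): compute the multi-dimensional
# matrix profile with stumpy.mstump, then extract motifs with stumpy.mmotifs.
import numpy as np
import stumpy

T = np.random.default_rng(0).random((4, 64))  # 4 dimensions, 64 time points
m = 8                                         # subsequence window length
P, I = stumpy.mstump(T, m)
motif_distances, motif_indices, motif_subspaces, motif_mdls = stumpy.mmotifs(T, P, I)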
| 32.433498
| 88
| 0.636239
| 992
| 6,584
| 3.878024
| 0.087702
| 0.049129
| 0.062386
| 0.124773
| 0.883026
| 0.881726
| 0.875227
| 0.842215
| 0.842215
| 0.830777
| 0
| 0.101566
| 0.243317
| 6,584
| 202
| 89
| 32.594059
| 0.670614
| 0
| 0
| 0.633136
| 0
| 0
| 0.000911
| 0
| 0
| 0
| 0
| 0
| 0.142012
| 1
| 0.035503
| false
| 0
| 0.029586
| 0
| 0.065089
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0 | 6 | 5b11294169a2545ace4b590947a477913a992f6b | 238 | py | Python | loss/loss_functions.py | vishalmanes109/nn-from-scratch | 53de76f39dfea3f625ec542536f0ab3bc44d0224 | ["MIT"] | 2 | 2020-10-09T05:50:14.000Z | 2021-04-10T08:52:03.000Z | loss/loss_functions.py | vishalmanes109/nn-from-scratch | 53de76f39dfea3f625ec542536f0ab3bc44d0224 | ["MIT"] | null | null | null | loss/loss_functions.py | vishalmanes109/nn-from-scratch | 53de76f39dfea3f625ec542536f0ab3bc44d0224 | ["MIT"] | null | null | null |
import numpy as np
def MSE(y, yhat):
    # mean squared error
    return np.mean(np.power(y - yhat, 2))
def dMSE(y, yhat):
    # gradient of MSE w.r.t. yhat
    return 2 * (yhat - y) / y.size
def MAE(y, yhat):
    # mean absolute error (np.mean so the value matches the name)
    return np.mean(np.abs(y - yhat))
def dMAE(y, yhat):
    # gradient of MAE w.r.t. yhat: elementwise sign (the original scalar
    # comparison `y == yhat` is ambiguous for arrays and not the derivative)
    return np.sign(yhat - y) / y.size
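# Editor's note: a quick numerical sanity check (illustrative only) that the
# analytic dMSE gradient matches a central finite difference on MSE.
if __name__ == "__main__":
    y = np.array([1.0, 2.0, 3.0])
    yhat = np.array([1.5, 1.5, 2.0])
    eps = 1e-6
    numeric = np.array([
        (MSE(y, yhat + eps * e) - MSE(y, yhat - eps * e)) / (2 * eps)
        for e in np.eye(len(y))
    ])
    assert np.allclose(numeric, dMSE(y, yhat))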
| 13.222222
| 39
| 0.60084
| 48
| 238
| 2.979167
| 0.4375
| 0.244755
| 0.307692
| 0.181818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021858
| 0.231092
| 238
| 17
| 40
| 14
| 0.759563
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.444444
| false
| 0
| 0.111111
| 0.444444
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0 | 6 | d2a6ab70e363fde2950efb4452d60062fc79d8a3 | 103 | py | Python | odf/web/__main__.py | steven-hh-ding/OD1NF1ST | 730d8690d745f80dcda6deb27a424b5254243de8 | ["Apache-2.0"] | null | null | null | odf/web/__main__.py | steven-hh-ding/OD1NF1ST | 730d8690d745f80dcda6deb27a424b5254243de8 | ["Apache-2.0"] | 1 | 2021-11-16T20:18:58.000Z | 2021-11-16T20:18:58.000Z | odf/web/__main__.py | steven-hh-ding/OD1NF1ST | 730d8690d745f80dcda6deb27a424b5254243de8 | ["Apache-2.0"] | 1 | 2021-11-16T00:42:48.000Z | 2021-11-16T00:42:48.000Z |
from odf.web import start_integrated_server
if __name__ == '__main__':
start_integrated_server()
| 17.166667
| 43
| 0.776699
| 13
| 103
| 5.230769
| 0.769231
| 0.441176
| 0.617647
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145631
| 103
| 5
| 44
| 20.6
| 0.772727
| 0
| 0
| 0
| 0
| 0
| 0.07767
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0 | 6 | d2b917b10d6195448d81c6382d50ca4fa26c03d7 | 65,070 | py | Python | crnet_small.py | crwsr124/GANsNRoses | 9f1a5455a3d448a9b25538bc32e878cb96be0933 | ["MIT"] | null | null | null | crnet_small.py | crwsr124/GANsNRoses | 9f1a5455a3d448a9b25538bc32e878cb96be0933 | ["MIT"] | null | null | null | crnet_small.py | crwsr124/GANsNRoses | 9f1a5455a3d448a9b25538bc32e878cb96be0933 | ["MIT"] | null | null | null |
from math import factorial
import torch
import torch.nn as nn
from torch.nn.modules.padding import ReflectionPad2d
from torch.nn.modules.pooling import FractionalMaxPool2d
from torch.nn.parameter import Parameter
import math
class PixelNorm(nn.Module):
def __init__(self, num_channels=None):
super().__init__()
# num_channels is only used to match function signature with other normalization layers
# it has no actual use
def forward(self, input):
return input / torch.sqrt(torch.mean(input ** 2, dim=1, keepdim=True) + 1e-5)
class MLP(nn.Module):
def __init__(self, inc, dim, n_layers):
super().__init__()
ActFunc = nn.LeakyReLU(0.2)
mlp = [PixelNorm(),
nn.Linear(inc, dim),
ActFunc,
PixelNorm()]
for i in range(n_layers-2):
mlp.extend([
nn.Linear(dim, dim),
ActFunc,
PixelNorm()
])
mlp.extend(
[nn.Linear(dim, dim),
PixelNorm()])
self.dim = dim
self.mlp = nn.Sequential(*mlp)
def forward(self, x):
b, c = x.size(0), x.size(1)
x = x.view(b, c)
x = self.mlp(x)
return x
class LayerInstanceNorm(nn.Module):
def __init__(self, dim):
super().__init__()
self.eps = 1e-6
self.gamma = nn.Parameter(torch.FloatTensor(1, dim, 1, 1).fill_(1.0))
self.beta = nn.Parameter(torch.FloatTensor(1, dim, 1, 1).fill_(0.0))
self.rho = nn.Parameter(torch.FloatTensor(1, dim, 1, 1).fill_(0.0))
def forward(self, x):
b, c, h, w = x.shape
ins_mean = x.view(b, c, -1).mean(dim=2).view(b, c, 1, 1)
ins_val = x.view(b, c, -1).var(dim=2).view(b, c, 1, 1) + self.eps
ins_std = ins_val.sqrt()
ln_mean = x.view(b, -1).mean(dim=1).view(b, 1, 1, 1)
ln_val = x.view(b, -1).var(dim=1).view(b, 1, 1, 1) + self.eps
ln_std = ln_val.sqrt()
rho = torch.clamp(self.rho, 0, 1)
x_ins = (x - ins_mean) / ins_std
x_ln = (x - ln_mean) / ln_std
x_hat = rho * x_ins + (1 - rho) * x_ln
return x_hat * self.gamma + self.beta
class AdaLIN(nn.Module):
def __init__(self, z_dim=256):
super().__init__()
self.eps = 1e-6
self.rho = nn.Parameter(torch.FloatTensor(1).fill_(1.0))
self.gamma = nn.Linear(z_dim, z_dim)
self.beta = nn.Linear(z_dim, z_dim)
def forward(self, x, z):
b,c,h,w = x.shape
ins_mean = x.view(b,c, -1).mean(dim=2).view(b, c, 1, 1)
ins_var = x.view(b,c,-1).var(dim=2) + self.eps
ins_std = ins_var.sqrt().view(b, c, 1, 1)
x_ins = (x - ins_mean) / ins_std
ln_mean = x.view(b, -1).mean(dim=1).view(b, 1, 1, 1)
ln_val = x.view(b, -1).var(dim=1).view(b, 1, 1, 1) + self.eps
ln_std = ln_val.sqrt()
x_ln = (x - ln_mean) / ln_std
rho = (self.rho - 0.1).clamp(0, 1.0) # smoothing
x_hat = rho * x_ins + (1-rho) * x_ln
gamma = self.gamma(z).view(b, c, 1, 1)
beta = self.beta(z).view(b, c, 1, 1)
# print("::::::::sss")
# print(gamma)
# print(beta)
x_hat = x_hat * gamma + beta
# x_hat = x_hat * 1.5 + 5
return x_hat
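# Sanity-check sketch (illustrative, not in the original source): AdaLIN blends
# instance-norm and layer-norm statistics with a learned rho, then applies a
# style-conditioned affine, so it requires z_dim == number of channels.
# norm = AdaLIN(z_dim=256)
# x, z = torch.randn(2, 256, 16, 16), torch.randn(2, 256)
# assert norm(x, z).shape == x.shape  # normalization preserves the input shape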
class ResBlockByAdaLIN(nn.Module):
def __init__(self, dim):
super().__init__()
fan_in = dim * 3 ** 2
self.scale = 1 / math.sqrt(fan_in)
self.conv1 = nn.Sequential(
nn.ReflectionPad2d(1),
nn.Conv2d(dim, dim, 3, 1, 0, groups=dim//4),
)
self.conv2 = nn.Sequential(
nn.ReflectionPad2d(1),
nn.Conv2d(dim, dim, 3, 1, 0, groups=dim//4),
)
self.addin_1 = AdaLIN(dim)
self.addin_2 = AdaLIN(dim)
# self.relu = nn.ReLU()
self.relu = nn.LeakyReLU(0.2, True)
def forward(self, x, z):
x1 = self.conv1(x)
x1 = self.relu(self.addin_1(x1, z))
x2 = self.conv2(x1)
x2 = self.addin_2(x2, z)
return x + x2
class CRGenerator_small(nn.Module):
def __init__(self, input_nc=3, output_nc=3, ngf=64, n_blocks=6, img_size=256, light=False):
assert(n_blocks >= 0)
super(CRGenerator_small, self).__init__()
self.input_nc = input_nc
self.output_nc = output_nc
self.ngf = ngf
self.n_blocks = n_blocks
self.img_size = img_size
self.light = light
self.encoder = CREncoder2()
self.decoder = CRDecoder()
def forward(self, input, z_noise):
encoder_out, feature, z, cam_logit, heatmap = self.encoder(input)
z = z + z_noise
out = self.decoder(encoder_out, z)
return out, cam_logit, heatmap, encoder_out, feature, z
class CREncoder(nn.Module):
def __init__(self):
# assert(n_blocks >= 0)
super(CREncoder, self).__init__()
self.input_nc = 3
self.output_nc = 256
# self.ngf = 64
self.n_blocks = 6
self.img_size = 256
self.light = False
# in:256x256 out:256x256
self.DownBlock1 = nn.Sequential(
nn.ReflectionPad2d(3),
nn.Conv2d(self.input_nc, 64, kernel_size=7, stride=1, padding=0, bias=False),
nn.InstanceNorm2d(64),
nn.ReLU(True))
# in:256x256 out:128x128
self.DownBlock2 = nn.Sequential(
nn.ReflectionPad2d(1),
nn.Conv2d(64, 128, kernel_size=3, stride=2, padding=0, bias=False),
nn.InstanceNorm2d(128),
nn.ReLU(True))
# in:128x128 out:64x64
self.DownBlock3 = nn.Sequential(
nn.ReflectionPad2d(1),
nn.Conv2d(128, 256, kernel_size=3, stride=2, padding=0, bias=False),
nn.InstanceNorm2d(256),
nn.ReLU(True))
# in:64x64 out:32x32
self.DownBlock4 = nn.Sequential(
nn.ReflectionPad2d(1),
nn.Conv2d(256, 256, kernel_size=3, stride=2, padding=0, bias=False),
nn.InstanceNorm2d(256),
nn.ReLU(True))
# in:32x32 out:16x16
self.DownBlock5 = nn.Sequential(
nn.ReflectionPad2d(1),
nn.Conv2d(256, 256, kernel_size=3, stride=2, padding=0, bias=False),
nn.InstanceNorm2d(256),
nn.ReLU(True))
# in:16x16 out:8x8
self.DownBlock6 = nn.Sequential(
nn.ReflectionPad2d(1),
nn.Conv2d(256, 256, kernel_size=3, stride=2, padding=0, bias=False),
nn.InstanceNorm2d(256),
nn.ReLU(True))
# Down-Sampling Bottleneck
self.ResBlock = nn.Sequential(
ResnetBlock(256, use_bias=False),
ResnetBlock(256, use_bias=False),
ResnetBlock(256, use_bias=False))
# Class Activation Map
self.gap_fc = nn.Linear(256, 1, bias=False)
self.gmp_fc = nn.Linear(256, 1, bias=False)
self.conv1x1 = nn.Conv2d(256 * 2, 256, kernel_size=1, stride=1, bias=True)
self.relu = nn.ReLU(True)
# z generator
self.FC = nn.Sequential(
nn.Linear(256*8*8, 256, bias=False),
nn.ReLU(True),
nn.Linear(256, 256, bias=False))
def forward(self, input):
# downsample
x = self.DownBlock1(input)
#print("1: " + str(x.shape))
x = self.DownBlock2(x)
#print("2: " + str(x.shape))
encoder_out = self.DownBlock3(x)
x = self.DownBlock4(encoder_out)
#print("4: " + str(x.shape))
x = self.DownBlock5(x)
#print("5: " + str(x.shape))
x = self.DownBlock6(x)
#print("6: " + str(x.shape))
# CAM
gap = torch.nn.functional.adaptive_avg_pool2d(x, 1)
gap_logit = self.gap_fc(gap.view(x.shape[0], -1))
gap_weight = list(self.gap_fc.parameters())[0]
gap = encoder_out * gap_weight.unsqueeze(2).unsqueeze(3)
gmp = torch.nn.functional.adaptive_max_pool2d(x, 1)
gmp_logit = self.gmp_fc(gmp.view(x.shape[0], -1))
gmp_weight = list(self.gmp_fc.parameters())[0]
gmp = encoder_out * gmp_weight.unsqueeze(2).unsqueeze(3)
cam_logit = torch.cat([gap_logit, gmp_logit], 1)
encoder_out = torch.cat([gap, gmp], 1)
encoder_out = self.relu(self.conv1x1(encoder_out))
#print("7encoder_out: " + str(encoder_out.shape))
heatmap = torch.sum(encoder_out, dim=1, keepdim=True)
#print("8: " + str(heatmap.shape))
# z
feature = self.ResBlock(x)
#print("9: " + str(feature.shape))
z = self.FC(feature.view(feature.shape[0], -1))
#print("10: " + str(z.shape))
return encoder_out, feature, z, cam_logit, heatmap
class CREncoder2(nn.Module):
def __init__(self):
# assert(n_blocks >= 0)
super(CREncoder2, self).__init__()
self.input_nc = 3
self.output_nc = 256
# self.ngf = 64
self.n_blocks = 6
self.img_size = 256
self.light = False
# in:256x256 out:256x256
self.DownBlock1 = nn.Sequential(
nn.ReflectionPad2d(1),
nn.Conv2d(self.input_nc, 64, kernel_size=3, stride=1, padding=0, bias=False),
nn.InstanceNorm2d(64),
nn.ReLU(True))
# in:256x256 out:128x128
self.DownBlock2 = nn.Sequential(
nn.ReflectionPad2d(1),
nn.Conv2d(64, 128, kernel_size=3, stride=2, padding=0, bias=False),
nn.InstanceNorm2d(128),
nn.ReLU(True))
# in:128x128 out:64x64
self.DownBlock3 = nn.Sequential(
nn.ReflectionPad2d(1),
nn.Conv2d(128, 256, kernel_size=3, stride=2, padding=0, bias=False),
nn.InstanceNorm2d(256),
nn.ReLU(True))
self.content_ResBlock = nn.Sequential(
ResnetBlock(256, use_bias=False))
# nn.UpsamplingBilinear2d(scale_factor=2),
# nn.ReflectionPad2d(1),
# nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=0, bias=False),
# nn.InstanceNorm2d(256),
# nn.ReLU(True))
# in:64x64 out:32x32
self.DownBlock4 = nn.Sequential(
nn.ReflectionPad2d(1),
nn.Conv2d(256, 256, kernel_size=3, stride=2, padding=0, bias=False),
nn.InstanceNorm2d(256),
nn.ReLU(True))
# in:32x32 out:16x16
self.DownBlock5 = nn.Sequential(
nn.ReflectionPad2d(1),
nn.Conv2d(256, 256, kernel_size=3, stride=2, padding=0, groups=256, bias=False),
nn.InstanceNorm2d(256),
nn.ReLU(True))
# in:16x16 out:8x8
self.DownBlock6 = nn.Sequential(
nn.ReflectionPad2d(1),
nn.Conv2d(256, 256, kernel_size=3, stride=2, padding=0, groups=256, bias=False),
nn.InstanceNorm2d(256),
nn.ReLU(True))
# Down-Sampling Bottleneck
self.ResBlock = nn.Sequential(
ResnetBlock(256, use_bias=False),
ResnetBlock(256, use_bias=False),
ResnetBlock(256, use_bias=False))
# Class Activation Map
self.gap_fc = nn.Linear(256, 1, bias=False)
self.gmp_fc = nn.Linear(256, 1, bias=False)
self.conv1x1 = nn.Conv2d(256 * 2, 256, kernel_size=1, stride=1, bias=True)
self.relu = nn.ReLU(True)
# z generator
self.FC = nn.Sequential(
nn.Linear(256*8*8, 256, bias=False),
            nn.LeakyReLU(0.2, True),  # the original passed True as negative_slope (slope 1.0), almost certainly a bug
nn.Linear(256, 256, bias=False))
def forward(self, input):
# downsample
x = self.DownBlock1(input)
#print("1: " + str(x.shape))
x = self.DownBlock2(x)
# print("2: " + str(x.shape))
encoder_out_before = self.DownBlock3(x)
x = self.DownBlock4(encoder_out_before)
# print("4: " + str(encoder_out_begore.shape))
x = self.DownBlock5(x)
# print("5: " + str(x.shape))
x = self.DownBlock6(x)
# print("6: " + str(x.shape))
# CAM
gap = torch.nn.functional.adaptive_avg_pool2d(encoder_out_before, 1)
gap_logit = self.gap_fc(gap.view(x.shape[0], -1))
gap_weight = list(self.gap_fc.parameters())[0]
gap = encoder_out_before * gap_weight.unsqueeze(2).unsqueeze(3)
gmp = torch.nn.functional.adaptive_max_pool2d(encoder_out_before, 1)
gmp_logit = self.gmp_fc(gmp.view(x.shape[0], -1))
gmp_weight = list(self.gmp_fc.parameters())[0]
gmp = encoder_out_before * gmp_weight.unsqueeze(2).unsqueeze(3)
cam_logit = torch.cat([gap_logit, gmp_logit], 1)
encoder_out_before = torch.cat([gap, gmp], 1)
encoder_out_before = self.relu(self.conv1x1(encoder_out_before))
encoder_out = self.content_ResBlock(encoder_out_before)
heatmap = torch.sum(encoder_out, dim=1, keepdim=True)
#print("8: " + str(heatmap.shape))
# print("7encoder_out: " + str(encoder_out.shape))
feature = self.ResBlock(x)
#print("9: " + str(feature.shape))
z = self.FC(feature.view(feature.shape[0], -1))
#print("10: " + str(z.shape))
return encoder_out, feature, z, cam_logit, heatmap
class UpsampleBlock(nn.Module):
def __init__(self, inc, outc, k, s, group):
super(UpsampleBlock, self).__init__()
up = []
up += [nn.Upsample(scale_factor=2, mode='nearest'),
nn.ReflectionPad2d(1),
nn.Conv2d(inc, outc, kernel_size=k, stride=s, padding=0, bias=False, groups = group)]
self.up = nn.Sequential(*up)
self.fc = nn.Linear(inc, outc, bias=False)
self.relu1 = nn.ReLU(True)
self.adalin = AdaLIN(outc)
self.relu2 = nn.ReLU(True)
def forward(self, x, z):
#print("11111111k:::::::::::::::::: " + str(x.shape))
x = self.up(x)
z = self.relu1(self.fc(z))
x = self.adalin(x, z)
x = self.relu2(x)
return x, z
class UpsampleBlock3(nn.Module):
def __init__(self, inc, outc, k, s, group):
super(UpsampleBlock3, self).__init__()
fan_in = inc * 3 ** 2
self.scale = 1 / math.sqrt(fan_in)
up = []
up += [nn.Upsample(scale_factor=2, mode='bilinear'),
nn.ReflectionPad2d(1),
nn.Conv2d(inc, outc, kernel_size=k, stride=s, padding=0, bias=False, groups = group)]
self.up = nn.Sequential(*up)
self.fc = nn.Linear(512, outc, bias=True)
self.relu1 = nn.LeakyReLU(0.2, True)
self.adalin = AdaLIN(outc)
self.relu2 = nn.LeakyReLU(0.2, True)
self.rgb_up = nn.Upsample(scale_factor=2, mode='bilinear')
self.conv2rgb = nn.Sequential(
nn.ReflectionPad2d(1),
nn.Conv2d(outc, 3, kernel_size=k, stride=s, padding=0, bias=True, groups = 1),
# nn.LeakyReLU(0.2, True)
)
self.rgb_fc = nn.Linear(512, 3, bias=True)
self.rgb_relu = nn.LeakyReLU(0.2, True)
self.rgb_adalin = AdaLIN(3)
def forward(self, x, z, rgb):
x = self.up(x)*self.scale
# print("11111111k:::::::::::::::::: " + str(x.shape))
z1 = self.relu1(self.fc(z))
# print("11111112k:::::::::::::::::: " + str(z.shape))
x = self.adalin(x, z1)
x = self.relu2(x)
z_rgb = self.rgb_relu(self.rgb_fc(z))
ttt = self.rgb_adalin(self.conv2rgb(x), z_rgb)
rgb = self.rgb_up(rgb) + ttt*self.scale
return x, rgb
class UpsampleBlock2(nn.Module):
def __init__(self, inc, outc, k, s, group):
super(UpsampleBlock2, self).__init__()
up = []
up += [nn.Upsample(scale_factor=2, mode='nearest'),
nn.ReflectionPad2d(1),
nn.Conv2d(inc, outc, kernel_size=k, stride=s, padding=0, bias=False, groups = group)]
self.up = nn.Sequential(*up)
self.lin = LayerInstanceNorm(outc)
def forward(self, x, z):
#print("11111111k:::::::::::::::::: " + str(x.shape))
x = self.up(x)
x = self.lin(x)
return x
class CRDecoder(nn.Module):
def __init__(self):
# assert(n_blocks >= 0)
super(CRDecoder, self).__init__()
self.input_nc = 256
self.output_nc = 3
# self.ngf = 64
self.n_blocks = 3
self.img_size = 256
self.light = False
# MLP
self.mlp = MLP(256, 256, 8)
adain_resblock = []
for i in range(self.n_blocks):
adain_resblock.append(ResBlockByAdaLIN(256))
self.adain_resblocks = nn.ModuleList(adain_resblock)
# in:64x64 out:128x128
self.upsample1 = UpsampleBlock2(256, 64, k = 3, s = 1, group=4)
# in:128x128 out:256x256
self.upsample2 = UpsampleBlock2(64, 32, k = 3, s = 1, group=2)
# final
final = [nn.ReflectionPad2d(3),
nn.Conv2d(32, self.output_nc, kernel_size=7, stride=1, padding=0, bias=False, groups=1),
nn.Tanh()]
self.final = nn.Sequential(*final)
def forward(self, x, z):
z = self.mlp(z) # b, 256
#print("11: " + str(z.shape))
#print("12: " + str(x.shape))
for i in range(self.n_blocks):
x = self.adain_resblocks[i](x, z)
#print("12k:::::::::::::::::: " + str(x.shape))
x = self.upsample1(x, z)
x = self.upsample2(x, z)
out = self.final(x)
return out
class CRDecoder_rose(nn.Module):
def __init__(self):
# assert(n_blocks >= 0)
super(CRDecoder_rose, self).__init__()
self.input_nc = 512
self.output_nc = 3
# self.ngf = 64
self.n_blocks = 3
self.img_size = 256
self.light = False
# MLP
self.mlp = MLP(8, 512, 8)
adain_resblock = []
for i in range(self.n_blocks):
adain_resblock.append(ResBlockByAdaLIN(512))
self.adain_resblocks = nn.ModuleList(adain_resblock)
# in:16x16 out:32x32
self.upsample1 = UpsampleBlock(512, 256, k = 3, s = 1, group=4)
# in:32x32 out:64x64
self.upsample2 = UpsampleBlock(256, 128, k = 3, s = 1, group=4)
# in:64x64 out:128x128
self.upsample3 = UpsampleBlock(128, 64, k = 3, s = 1, group=4)
# in:128x128 out:256x256
self.upsample4 = UpsampleBlock(64, 32, k = 3, s = 1, group=2)
# final
final = [nn.ReflectionPad2d(3),
nn.Conv2d(32, self.output_nc, kernel_size=7, stride=1, padding=0, bias=False, groups=1),
nn.Tanh()]
self.final = nn.Sequential(*final)
self.initialize_module(self)
def initialize_module(self, module):
for m in module.modules():
# print(":::::::::::::::::::")
# print(m)
if isinstance(m, (nn.Conv2d, nn.ConvTranspose2d)):
nn.init.kaiming_uniform_(m.weight, mode="fan_in", nonlinearity="relu")
if m.bias is not None:
nn.init.constant_(m.bias, 0)
elif isinstance(m, nn.BatchNorm2d):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
elif isinstance(m, nn.Linear):
nn.init.xavier_uniform_(m.weight)
if m.bias is not None:
nn.init.constant_(m.bias, 0)
def forward(self, x, z):
z = self.mlp(z) # b, 256
#print("11: " + str(z.shape))
#print("12: " + str(x.shape))
for i in range(self.n_blocks):
x = self.adain_resblocks[i](x, z)
#print("12k:::::::::::::::::: " + str(x.shape))
x, z = self.upsample1(x, z)
x, z = self.upsample2(x, z)
x, z = self.upsample3(x, z)
x, z = self.upsample4(x, z)
out = self.final(x)
return out
class CRDecoder_rose3(nn.Module):
def __init__(self):
# assert(n_blocks >= 0)
super(CRDecoder_rose3, self).__init__()
self.input_nc = 512
self.output_nc = 3
# self.ngf = 64
self.n_blocks = 1
self.img_size = 256
self.light = False
fan_in = self.input_nc * 3 ** 2
self.scale = 1 / math.sqrt(fan_in)
# MLP
self.mlp = MLP(8, 512, 8)
adain_resblock = []
for i in range(self.n_blocks):
adain_resblock.append(ResBlockByAdaLIN(512))
self.adain_resblocks = nn.ModuleList(adain_resblock)
self.conv2rgb = nn.Sequential(
nn.ReflectionPad2d(1),
nn.Conv2d(512, 3, kernel_size=3, stride=1, padding=0, bias=True, groups = 1),
# nn.LeakyReLU(0.2, True)
)
self.rgb_fc = nn.Linear(512, 3, bias=True)
self.rgb_relu = nn.LeakyReLU(0.2, True)
self.rgb_adalin = AdaLIN(3)
# in:16x16 out:32x32
self.upsample1 = UpsampleBlock3(512, 256, k = 3, s = 1, group=4)
# in:32x32 out:64x64
self.upsample2 = UpsampleBlock3(256, 128, k = 3, s = 1, group=4)
# in:64x64 out:128x128
self.upsample3 = UpsampleBlock3(128, 64, k = 3, s = 1, group=4)
# in:128x128 out:256x256
self.upsample4 = UpsampleBlock3(64, 32, k = 3, s = 1, group=2)
# final
final = [nn.ReflectionPad2d(3),
nn.Conv2d(32, self.output_nc, kernel_size=7, stride=1, padding=0, bias=False, groups=1),
nn.Tanh()]
self.final = nn.Sequential(*final)
self.initialize_module(self)
def initialize_module(self, module):
for m in module.modules():
# print(":::::::::::::::::::")
# print(m)
if isinstance(m, (nn.Conv2d, nn.ConvTranspose2d)):
nn.init.kaiming_uniform_(m.weight, mode="fan_in", nonlinearity="leaky_relu")
if m.bias is not None:
nn.init.constant_(m.bias, 0)
elif isinstance(m, nn.BatchNorm2d):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
elif isinstance(m, nn.Linear):
nn.init.xavier_uniform_(m.weight)
if m.bias is not None:
nn.init.constant_(m.bias, 0)
def forward(self, x, z):
z = self.mlp(z) # b, 256
# print("11: " + str(z))
#print("12: " + str(x.shape))
for i in range(self.n_blocks):
x = self.adain_resblocks[i](x, z)*self.scale
# print (x.shape)
rgb_z = self.rgb_relu(self.rgb_fc(z))
rgb = self.rgb_adalin(self.conv2rgb(x), rgb_z)*self.scale
# rgb = self.conv2rgb(x)*self.scale
# print (rgb)
x, rgb = self.upsample1(x, z, rgb)
x, rgb = self.upsample2(x, z, rgb)
x, rgb = self.upsample3(x, z, rgb)
x, rgb = self.upsample4(x, z, rgb)
# out = self.final(x)
return rgb
class ResnetBlock(nn.Module):
def __init__(self, dim, use_bias):
super(ResnetBlock, self).__init__()
conv_block = []
conv_block += [nn.ReflectionPad2d(1),
nn.Conv2d(dim, dim, kernel_size=3, stride=1, padding=0, bias=use_bias),
nn.InstanceNorm2d(dim),
nn.ReLU(True)]
conv_block += [nn.ReflectionPad2d(1),
nn.Conv2d(dim, dim, kernel_size=3, stride=1, padding=0, bias=use_bias),
nn.InstanceNorm2d(dim)]
self.conv_block = nn.Sequential(*conv_block)
def forward(self, x):
out = x + self.conv_block(x)
return out
class ResnetAdaILNBlock(nn.Module):
def __init__(self, dim, use_bias):
super(ResnetAdaILNBlock, self).__init__()
self.pad1 = nn.ReflectionPad2d(1)
self.conv1 = nn.Conv2d(dim, dim, kernel_size=3, stride=1, padding=0, bias=use_bias)
self.norm1 = adaILN(dim)
self.relu1 = nn.ReLU(True)
self.pad2 = nn.ReflectionPad2d(1)
self.conv2 = nn.Conv2d(dim, dim, kernel_size=3, stride=1, padding=0, bias=use_bias)
self.norm2 = adaILN(dim)
def forward(self, x, gamma, beta):
out = self.pad1(x)
out = self.conv1(out)
out = self.norm1(out, gamma, beta)
out = self.relu1(out)
out = self.pad2(out)
out = self.conv2(out)
out = self.norm2(out, gamma, beta)
return out + x
class adaILN(nn.Module):
def __init__(self, num_features, eps=1e-5):
super(adaILN, self).__init__()
self.eps = eps
self.rho = Parameter(torch.Tensor(1, num_features, 1, 1))
self.rho.data.fill_(0.9)
def forward(self, input, gamma, beta):
in_mean, in_var = torch.mean(input, dim=[2, 3], keepdim=True), torch.var(input, dim=[2, 3], keepdim=True)
out_in = (input - in_mean) / torch.sqrt(in_var + self.eps)
ln_mean, ln_var = torch.mean(input, dim=[1, 2, 3], keepdim=True), torch.var(input, dim=[1, 2, 3], keepdim=True)
out_ln = (input - ln_mean) / torch.sqrt(ln_var + self.eps)
out = self.rho.expand(input.shape[0], -1, -1, -1) * out_in + (1-self.rho.expand(input.shape[0], -1, -1, -1)) * out_ln
out = out * gamma.unsqueeze(2).unsqueeze(3) + beta.unsqueeze(2).unsqueeze(3)
return out
class ILN(nn.Module):
def __init__(self, num_features, eps=1e-5):
super(ILN, self).__init__()
self.eps = eps
self.rho = Parameter(torch.Tensor(1, num_features, 1, 1))
self.gamma = Parameter(torch.Tensor(1, num_features, 1, 1))
self.beta = Parameter(torch.Tensor(1, num_features, 1, 1))
self.rho.data.fill_(0.0)
self.gamma.data.fill_(1.0)
self.beta.data.fill_(0.0)
def forward(self, input):
in_mean, in_var = torch.mean(input, dim=[2, 3], keepdim=True), torch.var(input, dim=[2, 3], keepdim=True)
out_in = (input - in_mean) / torch.sqrt(in_var + self.eps)
ln_mean, ln_var = torch.mean(input, dim=[1, 2, 3], keepdim=True), torch.var(input, dim=[1, 2, 3], keepdim=True)
out_ln = (input - ln_mean) / torch.sqrt(ln_var + self.eps)
out = self.rho.expand(input.shape[0], -1, -1, -1) * out_in + (1-self.rho.expand(input.shape[0], -1, -1, -1)) * out_ln
out = out * self.gamma.expand(input.shape[0], -1, -1, -1) + self.beta.expand(input.shape[0], -1, -1, -1)
return out
class RhoClipper(object):
def __init__(self, min, max):
self.clip_min = min
self.clip_max = max
assert min < max
def __call__(self, module):
if hasattr(module, 'rho'):
w = module.rho.data
w = w.clamp(self.clip_min, self.clip_max)
module.rho.data = w
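# Usage sketch (illustrative; the training loop lives outside this file):
# RhoClipper is meant to be applied after each optimizer step to keep every
# `rho` parameter inside [clip_min, clip_max], UGATIT-style.
# clipper = RhoClipper(0, 1)
# model.apply(clipper)  # `model` is any nn.Module containing adaILN/ILN layers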
from torch.nn import functional as F
channels = {
4: 512,
8: 512,
16: 512,
32: 512,
64: 256,
128: 128,
256: 64,
512: 32,
1024: 16,
}
channels2 = {
16: 512,
32: 128,
64: 64,
128: 32,
256: 16,
}
class FusedLeakyReLU(nn.Module):
def __init__(self, channel, negative_slope=0.2, scale=2 ** 0.5):
super().__init__()
self.bias = nn.Parameter(torch.zeros(channel))
self.negative_slope = negative_slope
self.scale = scale
def forward(self, input):
return fused_leaky_relu(input, self.bias, self.negative_slope, self.scale)
def fused_leaky_relu(input, bias, negative_slope=0.2, scale=2 ** 0.5):
rest_dim = [1] * (input.ndim - bias.ndim - 1)
if input.ndim == 3:
return (
F.leaky_relu(
input + bias.view(1, *rest_dim, bias.shape[0]), negative_slope=negative_slope
)
* scale
)
else:
return (
F.leaky_relu(
input + bias.view(1, bias.shape[0], *rest_dim), negative_slope=negative_slope
)
* scale
)
def upfirdn2d_native(
input, kernel, up_x, up_y, down_x, down_y, pad_x0, pad_x1, pad_y0, pad_y1
):
_, channel, in_h, in_w = input.shape
input = input.reshape(-1, in_h, in_w, 1)
_, in_h, in_w, minor = input.shape
kernel_h, kernel_w = kernel.shape
out = input.view(-1, in_h, 1, in_w, 1, minor)
out = F.pad(out, [0, 0, 0, up_x - 1, 0, 0, 0, up_y - 1])
out = out.view(-1, in_h * up_y, in_w * up_x, minor)
out = F.pad(
out, [0, 0, max(pad_x0, 0), max(pad_x1, 0), max(pad_y0, 0), max(pad_y1, 0)]
)
out = out[
:,
max(-pad_y0, 0) : out.shape[1] - max(-pad_y1, 0),
max(-pad_x0, 0) : out.shape[2] - max(-pad_x1, 0),
:,
]
out = out.permute(0, 3, 1, 2)
out = out.reshape(
[-1, 1, in_h * up_y + pad_y0 + pad_y1, in_w * up_x + pad_x0 + pad_x1]
)
w = torch.flip(kernel, [0, 1]).view(1, 1, kernel_h, kernel_w)
out = F.conv2d(out, w)
out = out.reshape(
-1,
minor,
in_h * up_y + pad_y0 + pad_y1 - kernel_h + 1,
in_w * up_x + pad_x0 + pad_x1 - kernel_w + 1,
)
out = out.permute(0, 2, 3, 1)
out = out[:, ::down_y, ::down_x, :]
out_h = (in_h * up_y + pad_y0 + pad_y1 - kernel_h) // down_y + 1
out_w = (in_w * up_x + pad_x0 + pad_x1 - kernel_w) // down_x + 1
return out.view(-1, channel, out_h, out_w)
def make_kernel(k):
k = torch.tensor(k, dtype=torch.float32)
if k.ndim == 1:
k = k[None, :] * k[:, None]
k /= k.sum()
return k
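# Quick check (illustrative, not in the original): a 1-D tap list becomes a
# separable 2-D kernel normalized to unit sum.
# k = make_kernel([1, 3, 3, 1])
# assert k.shape == (4, 4) and torch.isclose(k.sum(), torch.tensor(1.0))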
def upfirdn2d(input, kernel, up=1, down=1, pad=(0, 0)):
out = upfirdn2d_native(
input, kernel, up, up, down, down, pad[0], pad[1], pad[0], pad[1]
)
return out
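# Note (added): upfirdn2d performs zero-insertion upsampling, FIR filtering with
# `kernel`, then stride-`down` decimation; e.g. up=2 with a 4-tap kernel and
# pad=(2, 1) exactly doubles the spatial size, as the Upsample module below uses.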
class Upsample(nn.Module):
def __init__(self, kernel, factor=2):
super().__init__()
self.factor = factor
kernel = make_kernel(kernel) * (factor ** 2)
self.register_buffer('kernel', kernel)
p = kernel.shape[0] - factor
pad0 = (p + 1) // 2 + factor - 1
pad1 = p // 2
self.pad = (pad0, pad1)
def forward(self, input):
out = upfirdn2d(input, self.kernel, up=self.factor, down=1, pad=self.pad)
return out
class Blur(nn.Module):
def __init__(self, kernel, pad, upsample_factor=1):
super().__init__()
kernel = make_kernel(kernel)
if upsample_factor > 1:
kernel = kernel * (upsample_factor ** 2)
self.register_buffer('kernel', kernel)
self.pad = pad
def forward(self, input):
out = upfirdn2d(input, self.kernel, pad=self.pad)
return out
class EqualLinear(nn.Module):
def __init__(
self, in_dim, out_dim, bias=True, bias_init=0, lr_mul=1, activation=None
):
super().__init__()
self.weight = nn.Parameter(torch.randn(out_dim, in_dim).div_(lr_mul))
if bias:
self.bias = nn.Parameter(torch.zeros(out_dim).fill_(bias_init))
else:
self.bias = None
self.activation = activation
self.scale = (1 / math.sqrt(in_dim)) * lr_mul
self.lr_mul = lr_mul
def forward(self, input):
bias = self.bias*self.lr_mul if self.bias is not None else None
if self.activation:
out = F.linear(input, self.weight * self.scale)
out = fused_leaky_relu(out, bias)
else:
out = F.linear(
input, self.weight * self.scale, bias=bias
)
return out
def __repr__(self):
return (
f'{self.__class__.__name__}({self.weight.shape[1]}, {self.weight.shape[0]})'
)
# class ModulatedConv2d(nn.Module):
# def __init__(
# self,
# in_channel,
# out_channel,
# kernel_size,
# style_dim,
# use_style=True,
# demodulate=True,
# upsample=False,
# downsample=False,
# blur_kernel=[1, 3, 3, 1],
# ):
# super().__init__()
# self.eps = 1e-8
# self.kernel_size = kernel_size
# self.in_channel = in_channel
# self.out_channel = out_channel
# self.upsample = upsample
# self.downsample = downsample
# self.use_style = use_style
# if upsample:
# factor = 2
# p = (len(blur_kernel) - factor) - (kernel_size - 1)
# pad0 = (p + 1) // 2 + factor - 1
# pad1 = p // 2 + 1
# self.blur = Blur(blur_kernel, pad=(pad0, pad1), upsample_factor=factor)
# if downsample:
# factor = 2
# p = (len(blur_kernel) - factor) + (kernel_size - 1)
# pad0 = (p + 1) // 2
# pad1 = p // 2
# self.blur = Blur(blur_kernel, pad=(pad0, pad1))
# fan_in = in_channel * kernel_size ** 2
# self.scale = 1 / math.sqrt(fan_in)
# self.padding = kernel_size // 2
# self.weight = nn.Parameter(
# torch.randn(1, out_channel, in_channel, kernel_size, kernel_size)
# )
# if use_style:
# self.modulation = EqualLinear(style_dim, in_channel, bias_init=1)
# else:
# self.modulation = nn.Parameter(torch.Tensor(1, 1, in_channel, 1, 1).fill_(1))
# self.demodulate = demodulate
# def __repr__(self):
# return (
# f'{self.__class__.__name__}({self.in_channel}, {self.out_channel}, {self.kernel_size}, '
# f'upsample={self.upsample}, downsample={self.downsample})'
# )
# def forward(self, input, style):
# batch, in_channel, height, width = input.shape
# if self.use_style:
# style = self.modulation(style).view(batch, 1, in_channel, 1, 1)
# weight = self.scale * self.weight * style
# else:
# weight = self.scale * self.weight.expand(batch,-1,-1,-1,-1) * self.modulation
# if self.demodulate:
# demod = torch.rsqrt(weight.pow(2).sum([2, 3, 4]) + 1e-8)
# weight = weight * demod.view(batch, self.out_channel, 1, 1, 1)
# weight = weight.view(
# batch * self.out_channel, in_channel, self.kernel_size, self.kernel_size
# )
# if self.upsample:
# input = input.view(1, batch * in_channel, height, width)
# weight = weight.view(
# batch, self.out_channel, in_channel, self.kernel_size, self.kernel_size
# )
# weight = weight.transpose(1, 2).reshape(
# batch * in_channel, self.out_channel, self.kernel_size, self.kernel_size
# )
# out = F.conv_transpose2d(input, weight, padding=0, stride=2, groups=batch)
# _, _, height, width = out.shape
# out = out.view(batch, self.out_channel, height, width)
# out = self.blur(out)
# elif self.downsample:
# input = self.blur(input)
# _, _, height, width = input.shape
# input = input.view(1, batch * in_channel, height, width)
# out = F.conv2d(input, weight, padding=0, stride=2, groups=batch)
# _, _, height, width = out.shape
# out = out.view(batch, self.out_channel, height, width)
# else:
# input = input.view(1, batch * in_channel, height, width)
# out = F.conv2d(input, weight, padding=self.padding, groups=batch)
# _, _, height, width = out.shape
# out = out.view(batch, self.out_channel, height, width)
# return out
class ModulatedConv2d(nn.Module):
def __init__(
self,
in_channel,
out_channel,
kernel_size,
style_dim,
use_style=True,
demodulate=True,
upsample=False,
downsample=False,
blur_kernel=[1, 3, 3, 1],
cgroup = 1,
):
super().__init__()
self.eps = 1e-8
self.kernel_size = kernel_size
self.in_channel = in_channel
self.out_channel = out_channel
self.upsample = upsample
self.downsample = downsample
self.use_style = use_style
self.cgroup = cgroup
if upsample:
factor = 2
p = (len(blur_kernel) - factor) - (kernel_size - 1)
pad0 = (p + 1) // 2 + factor - 1
pad1 = p // 2 + 1
self.blur = Blur(blur_kernel, pad=(pad0, pad1), upsample_factor=factor)
# self.blur = nn.Sequential(
# nn.ConstantPad2d((0, -1, 0,-1), 0),
# nn.Upsample(scale_factor=1, mode="bilinear"))
if downsample:
factor = 2
p = (len(blur_kernel) - factor) + (kernel_size - 1)
pad0 = (p + 1) // 2
pad1 = p // 2
self.blur = Blur(blur_kernel, pad=(pad0, pad1))
fan_in = in_channel * kernel_size ** 2
self.scale = 1 / math.sqrt(fan_in)
self.padding = kernel_size // 2
if upsample:
self.weight = nn.Parameter(
torch.randn(1, out_channel//2, in_channel, kernel_size, kernel_size)
)
else:
self.weight = nn.Parameter(
torch.randn(1, out_channel, in_channel//self.cgroup, kernel_size, kernel_size)
)
# self.wl = nn.ReLU6(nn.Tanh())
# self.convs = nn.Conv2d(1 * in_channel, self.out_channel, self.kernel_size, stride=1,groups=cgroup,padding=self.padding)
# self.convs_d = nn.Conv2d(1 * in_channel, self.out_channel, self.kernel_size, stride=2,groups=1,padding=0)
# self.convs_t = nn.ConvTranspose2d(1 * in_channel, self.out_channel, self.kernel_size, stride=2,groups=2,padding=0)
if use_style:
self.modulation = EqualLinear(style_dim, in_channel//self.cgroup, bias_init=1)
# self.modulation = nn.Sequential(
# nn.LeakyReLU(negative_slope=0.2),
# nn.Linear(style_dim, in_channel//self.cgroup))
else:
self.modulation = nn.Parameter(torch.Tensor(1, 1, in_channel, 1, 1).fill_(1))
self.demodulate = demodulate
def __repr__(self):
return (
f'{self.__class__.__name__}({self.in_channel}, {self.out_channel}, {self.kernel_size}, '
f'upsample={self.upsample}, downsample={self.downsample})'
)
def forward(self, input, style):
batch, in_channel, height, width = input.shape
if self.use_style:
if self.upsample:
style = self.modulation(style).view(batch, 1, in_channel, 1, 1)
else:
style = self.modulation(style).view(batch, 1, in_channel//self.cgroup, 1, 1)
weight = self.scale * self.weight * style
else:
weight = self.scale * self.weight.expand(batch,-1,-1,-1,-1) * self.modulation
if self.demodulate:
demod = torch.rsqrt(weight.pow(2).sum([2, 3, 4]) + 1e-8)
if self.upsample:
weight = weight * demod.view(batch, self.out_channel//2, 1, 1, 1)
else:
weight = weight * demod.view(batch, self.out_channel, 1, 1, 1)
if self.upsample:
weight = weight.view(
batch * self.out_channel//2, in_channel, self.kernel_size, self.kernel_size
)
else:
weight = weight.view(
batch * self.out_channel, in_channel//self.cgroup, self.kernel_size, self.kernel_size
)
if self.upsample:
input = input.view(1, batch * in_channel, height, width)
weight = weight.view(
batch, self.out_channel//2, in_channel, self.kernel_size, self.kernel_size
)
weight = weight.transpose(1, 2).reshape(
batch * in_channel, self.out_channel//2, self.kernel_size, self.kernel_size
)
out = F.conv_transpose2d(input, weight, padding=0, stride=2, groups=batch*2)
_, _, height, width = out.shape
out = out.view(batch, self.out_channel, height, width)
out = self.blur(out)
elif self.downsample:
input = self.blur(input)
_, _, height, width = input.shape
input = input.view(1, batch * in_channel, height, width)
out = F.conv2d(input, weight, padding=0, stride=2, groups=batch)
_, _, height, width = out.shape
out = out.view(batch, self.out_channel, height, width)
else:
input = input.view(1, batch * in_channel, height, width)
out = F.conv2d(input, weight, padding=self.padding, groups=batch*self.cgroup)
_, _, height, width = out.shape
out = out.view(batch, self.out_channel, height, width)
return out
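# Shape sketch (illustrative sizes, not from the source): the batch dimension is
# folded into conv groups so each sample is convolved with its own modulated
# weights.
# conv = ModulatedConv2d(64, 128, 3, style_dim=512)
# x, style = torch.randn(4, 64, 32, 32), torch.randn(4, 512)
# assert conv(x, style).shape == (4, 128, 32, 32)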
class ModulatedConv2d2(nn.Module):
def __init__(
self,
in_channel,
out_channel,
kernel_size,
style_dim,
use_style=True,
demodulate=True,
upsample=False,
downsample=False,
blur_kernel=[1, 3, 3, 1],
cgroup = 1,
):
super().__init__()
self.eps = 1e-8
self.kernel_size = kernel_size
self.in_channel = in_channel
self.out_channel = out_channel
self.upsample = upsample
self.downsample = downsample
self.use_style = use_style
self.cgroup = cgroup
if upsample:
factor = 2
p = (len(blur_kernel) - factor) - (kernel_size - 1)
pad0 = (p + 1) // 2 + factor - 1
pad1 = p // 2 + 1
# self.blur = Blur(blur_kernel, pad=(pad0, pad1), upsample_factor=factor)
self.blur = nn.Sequential(
nn.ConstantPad2d((0, -1, 0,-1), 0),
nn.Upsample(scale_factor=1, mode="bilinear"))
if downsample:
factor = 2
p = (len(blur_kernel) - factor) + (kernel_size - 1)
pad0 = (p + 1) // 2
pad1 = p // 2
self.blur = Blur(blur_kernel, pad=(pad0, pad1))
fan_in = in_channel * kernel_size ** 2
self.scale = 1 / math.sqrt(fan_in)
self.padding = kernel_size // 2
if upsample:
self.weight = nn.Parameter(
torch.randn(1, out_channel//2, in_channel, kernel_size, kernel_size)
)
else:
self.weight = nn.Parameter(
torch.randn(1, out_channel, in_channel//self.cgroup, kernel_size, kernel_size)
)
        self.wl = nn.ReLU6(inplace=True)  # the original passed nn.Tanh() as the inplace flag, which only evaluates truthy; behavior is unchanged
self.convs = nn.Conv2d(1 * in_channel, self.out_channel, self.kernel_size, stride=1,groups=cgroup,padding=self.padding)
self.convs_d = nn.Conv2d(1 * in_channel, self.out_channel, self.kernel_size, stride=2,groups=1,padding=0)
self.convs_t = nn.ConvTranspose2d(1 * in_channel, self.out_channel, self.kernel_size, stride=2,groups=2,padding=0)
if use_style:
# self.modulation = EqualLinear(style_dim, in_channel//self.cgroup, bias_init=1)
self.modulation = nn.Sequential(
nn.LeakyReLU(negative_slope=0.2),
nn.Linear(style_dim, in_channel//self.cgroup))
else:
self.modulation = nn.Parameter(torch.Tensor(1, 1, in_channel, 1, 1).fill_(1))
self.demodulate = demodulate
def __repr__(self):
return (
f'{self.__class__.__name__}({self.in_channel}, {self.out_channel}, {self.kernel_size}, '
f'upsample={self.upsample}, downsample={self.downsample})'
)
def forward(self, input, style):
batch, in_channel, height, width = input.shape
if self.use_style:
if self.upsample:
style = self.modulation(style).view(batch, 1, in_channel, 1, 1)
else:
style = self.modulation(style).view(batch, 1, in_channel//self.cgroup, 1, 1)
weight = self.scale * self.weight * style
weight = self.wl(weight)
else:
weight = self.scale * self.weight.expand(batch,-1,-1,-1,-1) * self.modulation
if self.demodulate:
demod = torch.rsqrt(weight.pow(2).sum([2, 3, 4]) + 1e-8)
if self.upsample:
weight = weight * demod.view(batch, self.out_channel//2, 1, 1, 1)
else:
weight = weight * demod.view(batch, self.out_channel, 1, 1, 1)
if self.upsample:
weight = weight.view(
batch * self.out_channel//2, in_channel, self.kernel_size, self.kernel_size
)
else:
weight = weight.view(
batch * self.out_channel, in_channel//self.cgroup, self.kernel_size, self.kernel_size
)
if self.upsample:
input = input.view(1, batch * in_channel, height, width)
weight = weight.view(
batch, self.out_channel//2, in_channel, self.kernel_size, self.kernel_size
)
weight = weight.transpose(1, 2).reshape(
batch * in_channel, self.out_channel//2, self.kernel_size, self.kernel_size
)
out = F.conv_transpose2d(input, weight, padding=0, stride=2, groups=batch*2)
# out = self.convs_t(input)
_, _, height, width = out.shape
out = out.view(batch, self.out_channel, height, width)
# print(out.shape)
out = self.blur(out)
# print(out.shape)
elif self.downsample:
input = self.blur(input)
_, _, height, width = input.shape
input = input.view(1, batch * in_channel, height, width)
# out = F.conv2d(input, weight, padding=0, stride=2, groups=batch)
out = self.convs_d(input)
_, _, height, width = out.shape
out = out.view(batch, self.out_channel, height, width)
else:
input = input.view(1, batch * in_channel, height, width)
# out = F.conv2d(input, weight, padding=self.padding, groups=batch*self.cgroup)
out = self.convs(input)
_, _, height, width = out.shape
out = out.view(batch, self.out_channel, height, width)
return out
class StyledConv(nn.Module):
def __init__(
self,
in_channel,
out_channel,
kernel_size,
style_dim,
use_style=True,
upsample=False,
downsample=False,
blur_kernel=[1, 3, 3, 1],
demodulate=True,
activation = True,
group = 1,
):
super().__init__()
self.use_style = use_style
self.activation = activation
self.conv = ModulatedConv2d(
in_channel,
out_channel,
kernel_size,
style_dim,
use_style=use_style,
upsample=upsample,
downsample=downsample,
blur_kernel=blur_kernel,
demodulate=demodulate,
cgroup=group,
)
#if use_style:
# self.noise = NoiseInjection()
#else:
# self.noise = None
# self.bias = nn.Parameter(torch.zeros(1, out_channel, 1, 1))
# self.activate = ScaledLeakyReLU(0.2)
self.activate = FusedLeakyReLU(out_channel)
def forward(self, input, style=None, noise=None):
out = self.conv(input, style)
#if self.use_style:
# out = self.noise(out, noise=noise)
# out = out + self.bias
if self.activation:
out = self.activate(out)
return out
class StyledConv2(nn.Module):
def __init__(
self,
in_channel,
out_channel,
kernel_size,
style_dim,
use_style=True,
upsample=False,
downsample=False,
blur_kernel=[1, 3, 3, 1],
demodulate=True,
):
super().__init__()
self.use_style = use_style
self.conv = ModulatedConv2d2(
in_channel,
out_channel,
kernel_size,
style_dim,
use_style=use_style,
upsample=upsample,
downsample=downsample,
blur_kernel=blur_kernel,
demodulate=demodulate,
)
# self.conv = nn.Conv2d(in_channel, out_channel,3,1,0, groups=2)
#if use_style:
# self.noise = NoiseInjection()
#else:
# self.noise = None
# self.bias = nn.Parameter(torch.zeros(1, out_channel, 1, 1))
# self.activate = ScaledLeakyReLU(0.2)
# self.activate = FusedLeakyReLU(out_channel)
def forward(self, input, style=None, noise=None):
out = self.conv(input, style)
# out = self.conv(input)
#if self.use_style:
# out = self.noise(out, noise=noise)
# out = out + self.bias
# out = self.activate(out)
return out
class ToRGB(nn.Module):
def __init__(self, in_channel, style_dim, upsample=True, blur_kernel=[1, 3, 3, 1]):
super().__init__()
if upsample:
self.upsample = Upsample(blur_kernel)
self.conv = ModulatedConv2d(in_channel, 4, 1, style_dim, demodulate=False, cgroup=1)
self.bias = nn.Parameter(torch.zeros(1, 4, 1, 1))
def forward(self, input, style, skip=None):
out = self.conv(input, style)
out = out + self.bias
if skip is not None:
skip = self.upsample(skip)
out = out + skip
return out
class ToRGB2(nn.Module):
def __init__(self, in_channel, style_dim, upsample=True, blur_kernel=[1, 3, 3, 1]):
super().__init__()
if upsample:
# self.upsample = Upsample(blur_kernel)
self.upsample = nn.Sequential(nn.Upsample(scale_factor=2, mode="bilinear"))
self.conv = ModulatedConv2d2(in_channel, 3, 1, style_dim, demodulate=False)
self.bias = nn.Parameter(torch.zeros(1, 3, 1, 1))
def forward(self, input, style, skip=None):
out = self.conv(input, style)
out = out + self.bias
if skip is not None:
skip = self.upsample(skip)
out = out + skip
return out
class StyledResBlock(nn.Module):
def __init__(self, in_channel, style_dim, blur_kernel=[1, 3, 3, 1], demodulate=True):
super().__init__()
self.conv1 = StyledConv(in_channel, in_channel*2, 1, style_dim, upsample=False, blur_kernel=blur_kernel, demodulate=demodulate)
self.conv2 = StyledConv(in_channel*2, in_channel*2, 3, style_dim, upsample=False, blur_kernel=blur_kernel, demodulate=demodulate, group=in_channel)
self.conv3 = StyledConv(in_channel*2, in_channel, 1, style_dim, upsample=False, blur_kernel=blur_kernel, demodulate=demodulate, activation=False)
# self.conv1 = StyledConv(in_channel, in_channel//2, 3, style_dim, upsample=False, blur_kernel=blur_kernel, demodulate=demodulate)
# self.conv2 = StyledConv(in_channel//2, in_channel, 3, style_dim, upsample=False, blur_kernel=blur_kernel, demodulate=demodulate)
def forward(self, input, style):
out = self.conv1(input, style)
out = self.conv2(out, style)
out = self.conv3(out, style)
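        # dividing the two-branch sum by sqrt(2) keeps the activation variance
        # roughly constant, a common StyleGAN2 residual trick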
out = (out + input) / math.sqrt(2)
return out
class StyledRes(nn.Module):
def __init__(self, in_channel, style_dim, blur_kernel=[1, 3, 3, 1], demodulate=True, largec=True):
super().__init__()
k = 1
if largec:
k = 2
self.conv1 = StyledConv(in_channel, in_channel*k, 1, style_dim, upsample=False, blur_kernel=blur_kernel, demodulate=demodulate)
self.conv2 = StyledConv(in_channel*k, in_channel*k, 3, style_dim, upsample=False, blur_kernel=blur_kernel, demodulate=demodulate, group=in_channel)
self.conv3 = StyledConv(in_channel*k, in_channel, 1, style_dim, upsample=False, blur_kernel=blur_kernel, demodulate=demodulate, activation=False)
# self.conv1 = StyledConv(in_channel, in_channel//2, 3, style_dim, upsample=False, blur_kernel=blur_kernel, demodulate=demodulate)
# self.conv2 = StyledConv(in_channel//2, in_channel, 3, style_dim, upsample=False, blur_kernel=blur_kernel, demodulate=demodulate)
def forward(self, input, style):
out = self.conv1(input, style)
out = self.conv2(out, style)
out = self.conv3(out, style)
# out = (out + input) / math.sqrt(2)
return out
#1.4G flops
class Decoder_kkk(nn.Module):
def __init__(
self
):
# assert(n_blocks >= 0)
super(Decoder_kkk, self).__init__()
self.input_nc = 512
self.output_nc = 3
self.size = 256
self.num_down = 4
self.latent_dim = 8
self.n_mlp = 5
channel_multiplier=1
blur_kernel=[1, 3, 3, 1]
lr_mlp=0.01
# self.use_mapping=True
self.log_size = int(math.log(self.size, 2)) #7
in_log_size = self.log_size - self.num_down #7-2 or 7-3
in_size = 2 ** in_log_size
style_dim = 512
in_channel = 512
# MLP
self.mapping = MLP(self.latent_dim, style_dim, self.n_mlp)
self.adain_bottleneck = nn.ModuleList()
for i in range(2):
self.adain_bottleneck.append(StyledResBlock(in_channel, style_dim))
self.conv1 = StyledConv(in_channel, 512, 1, style_dim, upsample=False, blur_kernel=blur_kernel)
self.conv1_s = StyledConv(512, 512, 3, style_dim, upsample=False, blur_kernel=blur_kernel, group=512)
self.conv1_ss = StyledConv(512, 256, 1, style_dim, upsample=False, blur_kernel=blur_kernel, activation=False)
self.to_rgb1 = ToRGBcr(256, style_dim, upsample=False)
self.to_alpha = nn.Sigmoid()
self.convs = nn.ModuleList()
self.to_rgbs = nn.ModuleList()
in_channel = 256
for i in range(in_log_size+1, self.log_size + 1):
out_channel = channels2[2 ** i]
# print(":::::kk:::", out_channel)
self.convs.append(
StyledConv2dUpcr(in_channel, out_channel, style_dim)
# StyledConv(
# in_channel,
# out_channel,
# 3,
# style_dim,
# upsample=True,
# blur_kernel=blur_kernel,
# )
)
# self.convs.append(
# StyledConv(
# out_channel, out_channel, 3, style_dim, blur_kernel=blur_kernel
# )
# )
self.convs.append(
StyledConv2dcr(out_channel, out_channel, style_dim, 3)
# StyledRes(out_channel, style_dim,largec=False)
)
self.to_rgbs.append(ToRGBcr(out_channel, style_dim))
in_channel = out_channel
def forward(self, input, styles):
styles = self.mapping(styles)
#styles = styles.repeat(1, n_latent).view(styles.size(0), n_latent, -1)
out = input
i = 0
for conv in self.adain_bottleneck:
out = conv(out, styles)
i += 1
out = self.conv1(out, styles, noise=None)
out = self.conv1_s(out, styles, noise=None)
out = self.conv1_ss(out, styles, noise=None)
skip = self.to_rgb1(out, styles)
i += 2
for conv1, conv2, to_rgb in zip(
self.convs[::2], self.convs[1::2], self.to_rgbs
):
out = conv1(out, styles, noise=None)
out = conv2(out, styles)
skip = to_rgb(out, styles, skip)
i += 3
image = skip[:,0:3,:,:]
alpha = self.to_alpha(skip[:,3:4,:,:])
return image, alpha
from modulated_conv2d import *
# from modulated_conv2d import ModulatedDWConv2dcr
class Decoder_kkk2(nn.Module):
def __init__(
self
):
# assert(n_blocks >= 0)
super(Decoder_kkk2, self).__init__()
self.input_nc = 512
self.output_nc = 3
self.size = 256
self.num_down = 4
self.latent_dim = 8
self.n_mlp = 5
channel_multiplier=1
blur_kernel=[1, 3, 3, 1]
lr_mlp=0.01
# self.use_mapping=True
self.log_size = int(math.log(self.size, 2)) #7
in_log_size = self.log_size - self.num_down #7-2 or 7-3
in_size = 2 ** in_log_size
style_dim = 512
in_channel = 512
# MLP
self.mapping = MLP(self.latent_dim, style_dim, self.n_mlp)
self.adain_bottleneck = nn.ModuleList()
for i in range(2):
self.adain_bottleneck.append(StyledResBlockcr(in_channel, style_dim))
# self.conv1 = StyledConv2dcr(in_channel, 512, style_dim, 3)
self.conv1 = StyledConv2dcr(in_channel, 256, style_dim, 3)
# self.conv1 = StyledConv(in_channel, 512, 1, style_dim, upsample=False, blur_kernel=blur_kernel)
# self.conv1_s = StyledConv(512, 512, 3, style_dim, upsample=False, blur_kernel=blur_kernel, group=512)
# self.conv1_ss = StyledConv(512, 256, 1, style_dim, upsample=False, blur_kernel=blur_kernel, activation=False)
self.to_rgb1 = ToRGBcr(256, style_dim, upsample=False)
self.to_alpha = nn.Sigmoid()
self.convs = nn.ModuleList()
self.to_rgbs = nn.ModuleList()
in_channel = 256
for i in range(in_log_size+1, self.log_size + 1):
out_channel = channels2[2 ** i]
# print(":::::kk:::", out_channel)
self.convs.append(
StyledConv2dUpcr(in_channel, out_channel, style_dim)
)
self.convs.append(
StyledConv2dcr(out_channel, out_channel, style_dim, 3)
)
self.to_rgbs.append(ToRGBcr(out_channel, style_dim))
in_channel = out_channel
def forward(self, input, styles):
styles = self.mapping(styles)
#styles = styles.repeat(1, n_latent).view(styles.size(0), n_latent, -1)
out = input
i = 0
for conv in self.adain_bottleneck:
out = conv(out, styles)
i += 1
out = self.conv1(out, styles, noise=None)
skip = self.to_rgb1(out, styles)
i += 2
for conv1, conv2, to_rgb in zip(
self.convs[::2], self.convs[1::2], self.to_rgbs
):
out = conv1(out, styles, noise=None)
out = conv2(out, styles)
skip = to_rgb(out, styles, skip)
i += 3
image = skip[:,0:3,:,:]
alpha = self.to_alpha(skip[:,3:4,:,:])
return image, alpha
class Decoder_kkk512(nn.Module):
def __init__(
self
):
# assert(n_blocks >= 0)
super(Decoder_kkk512, self).__init__()
self.input_nc = 512
self.output_nc = 3
self.size = 256
self.num_down = 4
self.latent_dim = 8
self.n_mlp = 5
channel_multiplier=1
blur_kernel=[1, 3, 3, 1]
lr_mlp=0.01
# self.use_mapping=True
self.log_size = int(math.log(self.size, 2)) #7
in_log_size = self.log_size - self.num_down #7-2 or 7-3
in_size = 2 ** in_log_size
style_dim = 512
in_channel = 512
# MLP
self.mapping = MLP(self.latent_dim, style_dim, self.n_mlp)
self.adain_bottleneck = nn.ModuleList()
for i in range(2):
self.adain_bottleneck.append(StyledResBlockcr(in_channel, style_dim))
# self.conv1 = StyledConv2dcr(in_channel, 512, style_dim, 3)
self.conv1 = StyledConv2dcr(in_channel, 256, style_dim, 3)
# self.conv1 = StyledConv(in_channel, 512, 1, style_dim, upsample=False, blur_kernel=blur_kernel)
# self.conv1_s = StyledConv(512, 512, 3, style_dim, upsample=False, blur_kernel=blur_kernel, group=512)
# self.conv1_ss = StyledConv(512, 256, 1, style_dim, upsample=False, blur_kernel=blur_kernel, activation=False)
self.to_rgb1 = ToRGBcr(256, style_dim, upsample=False)
self.to_alpha = nn.Sigmoid()
self.convs = nn.ModuleList()
self.to_rgbs = nn.ModuleList()
in_channel = 256
for i in range(in_log_size+1, self.log_size + 1):
out_channel = channels2[2 ** i]
# print(":::::kk:::", out_channel)
self.convs.append(
StyledConv2dUpcr(in_channel, out_channel, style_dim)
)
self.convs.append(
StyledConv2dcr(out_channel, out_channel, style_dim, 3)
)
self.to_rgbs.append(ToRGBcr(out_channel, style_dim))
in_channel = out_channel
self.ups2 = nn.Upsample(scale_factor=2, mode='bilinear', align_corners=True)
self.conv512 = nn.Sequential(
nn.ReflectionPad2d(1),
nn.Conv2d(3, 9, kernel_size=3, stride=1, padding=0, bias=False),
# nn.InstanceNorm2d(9),
nn.ReLU(True),
nn.ReflectionPad2d(1),
nn.Conv2d(9, 3, kernel_size=3, stride=1, padding=0, bias=False),
# nn.InstanceNorm2d(3),
)
def forward(self, input, styles):
styles = self.mapping(styles)
#styles = styles.repeat(1, n_latent).view(styles.size(0), n_latent, -1)
out = input
i = 0
for conv in self.adain_bottleneck:
out = conv(out, styles)
i += 1
out = self.conv1(out, styles, noise=None)
skip = self.to_rgb1(out, styles)
i += 2
for conv1, conv2, to_rgb in zip(
self.convs[::2], self.convs[1::2], self.to_rgbs
):
out = conv1(out, styles, noise=None)
out = conv2(out, styles)
skip = to_rgb(out, styles, skip)
i += 3
image256 = skip[:,0:3,:,:]
alpha = self.to_alpha(skip[:,3:4,:,:])
image512 = self.ups2(image256)
delta = 0.01*self.conv512(image512)
image512 = image512 + delta
return image256, alpha, image512
from thop import profile
from matplotlib import pyplot as plt
from skimage import io
from skimage import transform
import numpy as np
# from torchstat import stat
# from ptflops import get_model_complexity_info
def count_your_model(model):
    # rough parameter/FLOP tally; the original shadowed the builtin `sum`,
    # overwrote the total instead of accumulating, and re-initialized nn.Linear
    # weights as a side effect -- fixed here to accumulate and only count
    total = 0
    for m in model.modules():
        print(m)
        if isinstance(m, nn.Conv2d):
            total += (m.weight.shape[2]*m.weight.shape[3]*m.in_channels*m.out_channels + m.out_channels)*256*256
            print(m.weight.shape[0], m.weight.shape[1], m.weight.shape[2], m.weight.shape[3])
        elif isinstance(m, ModulatedConv2d2):
            total += m.weight.shape[2]*m.weight.shape[3]*m.in_channel*m.out_channel + m.out_channel
        elif isinstance(m, nn.Linear):
            total += m.weight.numel() + (m.bias.numel() if m.bias is not None else 0)
    print(total)
    return total
#return y.size(2) * y.size(3) * y.size(1) * self.conv.weight.size(2) * self.conv.weight.size(3) / 1
def test():
torch.set_printoptions(precision=4, sci_mode=False)
# net = CRGenerator_small(3, 3, 32, 3, 256, False)
# net = CRDecoder_rose3()
net = Decoder_kkk2()
style = torch.randn(1, 8)
style2 = torch.randn(1, 8)
style3 = torch.randn(2, 8)
# z_bb = torch.randn(1, 256)*100
# noo = nn.LayerNorm(256)
# z_bb = noo(z_bb)
# z_kk = 0
x1 = torch.randn(1,3,256,256)
x2 = torch.randn(1,512,16,16)
x3 = torch.randn(2,512,16,16)
img = io.imread("/Users/cr/git/face/Morph-UGATIT/datasets/trainA/female_4493.jpg")
img = transform.resize(img, (16, 16))
img_tensor = torch.from_numpy(img.transpose((2,0,1)))
img_tensor = torch.reshape(img_tensor, (1, 3, 16, 16))[:,0:2,:,:]
# x2 = img_tensor.repeat((1, 256, 1, 1)).float()
# print (x2.shape)
# print(x2)
out, alpha = net(x2, style)
out2, alpha = net(x2, style2)
# print(alpha)
# stat(net, (3, 256, 256) )
#flops,params = get_model_complexity_info(net,(1,3,256,256),as_strings=True,print_per_layer_stat=True)
# print("________________________\n" + str(net))
    million = 1000000  # scale for printing FLOPs/params in millions
flops256, _ = profile(net, (x2,style,))
print("decoder flops", flops256/million)
total = sum([param.nelement() for param in net.parameters()])
print("params:::::::::", total/million)
# net2 = MLP(8, 512, 8)
xx = torch.randn(1,4,128,128)
styles = torch.randn(1, 512)
net2 = StyledConv2(4, 6, 3, 512, upsample=True)
# net2 = torch.nn.DataParallel(net2)
flops2, _ = profile(net2, (xx, styles, ))
#count_your_model(net2)
# print("decoder flops:::::::::", flops2/million)
# print (out.shape)
out = torch.reshape(out, (3, 256, 256))
out = out.detach().numpy().transpose((1, 2, 0))
    out = (out - np.min(out)) / (np.max(out) - np.min(out))  # min-max normalize; the original missed the outer parentheses
out2 = torch.reshape(out2, (3, 256, 256))
out2 = out2.detach().numpy().transpose((1, 2, 0))
    out2 = (out2 - np.min(out2)) / (np.max(out2) - np.min(out2))  # min-max normalize, same fix as above
plt.figure("haha")
plt.subplot(1,2,1), plt.title('a')
plt.imshow(out)
plt.subplot(1,2,2), plt.title('b')
plt.imshow(out2)
plt.show()
# realA = torch.randn(1,2,4,4)
# realA_filp=torch.flip(realA,[3])
# print(realA)
# print(realA_filp)
# print(z_bb)
# print(z.shape)
# print(cam_logit)
# print(z_bb)
# test()
| 33.785047
| 155
| 0.563685
| 8,656
| 65,070
| 4.066543
| 0.049561
| 0.030426
| 0.014716
| 0.014489
| 0.803466
| 0.778551
| 0.75429
| 0.729602
| 0.705057
| 0.68983
| 0
| 0.052129
| 0.302782
| 65,070
| 1,925
| 156
| 33.802597
| 0.723748
| 0.166728
| 0
| 0.624801
| 0
| 0.002385
| 0.01011
| 0.00614
| 0
| 0
| 0
| 0
| 0.00159
| 1
| 0.062798
| false
| 0
| 0.011129
| 0.003975
| 0.13434
| 0.004769
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
825aca8e583c9bcbe44869830072404ec776c882
| 96
|
py
|
Python
|
venv/lib/python3.8/site-packages/pyparsing/actions.py
|
Retraces/UkraineBot
|
3d5d7f8aaa58fa0cb8b98733b8808e5dfbdb8b71
|
[
"MIT"
] | 2
|
2022-03-13T01:58:52.000Z
|
2022-03-31T06:07:54.000Z
|
venv/lib/python3.8/site-packages/pyparsing/actions.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | 19
|
2021-11-20T04:09:18.000Z
|
2022-03-23T15:05:55.000Z
|
venv/lib/python3.8/site-packages/pyparsing/actions.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | null | null | null |
/home/runner/.cache/pip/pool/eb/4b/fb/9844ce073734d583c7fea403e62b1abe071226901f20aa73823337bda5
| 96
| 96
| 0.895833
| 9
| 96
| 9.555556
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.427083
| 0
| 96
| 1
| 96
| 96
| 0.46875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
82c31af5ccaf5c359c9f4a8f94fe27e875855cf3
| 15,571
|
py
|
Python
|
pyronear/models/resnet.py
|
jmaguiar/PyroNear
|
33b045a2cb392787ac00ccca8afbe5b63e62380c
|
[
"MIT"
] | null | null | null |
pyronear/models/resnet.py
|
jmaguiar/PyroNear
|
33b045a2cb392787ac00ccca8afbe5b63e62380c
|
[
"MIT"
] | null | null | null |
pyronear/models/resnet.py
|
jmaguiar/PyroNear
|
33b045a2cb392787ac00ccca8afbe5b63e62380c
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from torchvision.models.resnet import BasicBlock, Bottleneck, ResNet, model_urls as imagenet_urls
from torchvision.models.utils import load_state_dict_from_url
from .utils import cnn_model
__all__ = ['resnet18', 'resnet34', 'resnet50', 'resnet101',
'resnet152', 'resnext50_32x4d', 'resnext101_32x8d',
'wide_resnet50_2', 'wide_resnet101_2']
model_urls = {
'resnet18': 'https://srv-file6.gofile.io/download/5WANbz/resnet18-binary-classification.pth',
'resnet34': 'https://srv-file7.gofile.io/download/ay3i9I/resnet34-binary-classification.pth'
}
model_cut = -2
def _resnet(arch, block, layers, pretrained=False, progress=True,
imagenet_pretrained=False, num_classes=1, lin_features=512,
dropout_prob=0.5, bn_final=False, concat_pool=True, **kwargs):
# Model creation
base_model = ResNet(block, layers, num_classes=num_classes, **kwargs)
# Imagenet pretraining
if imagenet_pretrained:
if pretrained:
raise ValueError('imagenet_pretrained cannot be set to True if pretrained=True')
state_dict = load_state_dict_from_url(imagenet_urls[arch],
progress=progress)
# Remove FC params from dict
for key in ('fc.weight', 'fc.bias'):
state_dict.pop(key, None)
missing, unexpected = base_model.load_state_dict(state_dict, strict=False)
if any(unexpected) or any(not elt.startswith('fc.') for elt in missing):
raise KeyError(f"Missing parameters: {missing}\nUnexpected parameters: {unexpected}")
# Cut at last conv layers
model = cnn_model(base_model, model_cut, base_model.fc.in_features, num_classes,
lin_features, dropout_prob, bn_final=bn_final, concat_pool=concat_pool)
# Parameter loading
if pretrained:
state_dict = load_state_dict_from_url(model_urls[arch],
progress=progress)
model.load_state_dict(state_dict)
return model
def resnet18(pretrained=False, progress=True, imagenet_pretrained=False, num_classes=1,
lin_features=512, dropout_prob=0.5, bn_final=False, concat_pool=True, **kwargs):
r"""ResNet-18 model for image classification from
`"Deep Residual Learning for Image Recognition" <https://arxiv.org/pdf/1512.03385.pdf>`_
Args:
pretrained (bool, optional): should pretrained parameters be loaded (OpenFire training)
progress (bool, optional): should a progress bar be displayed while downloading pretrained parameters
imagenet_pretrained (bool, optional): should pretrained parameters be loaded on conv layers (ImageNet training)
num_classes (int, optional): number of output classes
lin_features (Union[int, list<int>], optional): number of nodes in intermediate layers of model's head
dropout_prob (float, optional): dropout probability of head FC layers
bn_final (bool, optional): should a batch norm be added after the last layer
concat_pool (bool, optional): should pooling be replaced by :mod:`pyronear.nn.AdaptiveConcatPool2d`
**kwargs: optional arguments of :mod:`torchvision.models.resnet.ResNet`
"""
return _resnet('resnet18', BasicBlock, [2, 2, 2, 2], pretrained, progress,
imagenet_pretrained, num_classes, lin_features, dropout_prob,
bn_final, concat_pool, **kwargs)
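# Illustrative call (added; keyword names match the signature above):
# model = resnet18(imagenet_pretrained=True, num_classes=1)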
def resnet34(pretrained=False, progress=True, imagenet_pretrained=False, num_classes=1,
lin_features=512, dropout_prob=0.5, bn_final=False, concat_pool=True, **kwargs):
r"""ResNet-34 model for image classification from
`"Deep Residual Learning for Image Recognition" <https://arxiv.org/pdf/1512.03385.pdf>`_
Args:
pretrained (bool, optional): should pretrained parameters be loaded (OpenFire training)
progress (bool, optional): should a progress bar be displayed while downloading pretrained parameters
imagenet_pretrained (bool, optional): should pretrained parameters be loaded on conv layers (ImageNet training)
num_classes (int, optional): number of output classes
lin_features (Union[int, list<int>], optional): number of nodes in intermediate layers of model's head
dropout_prob (float, optional): dropout probability of head FC layers
bn_final (bool, optional): should a batch norm be added after the last layer
concat_pool (bool, optional): should pooling be replaced by :mod:`pyronear.nn.AdaptiveConcatPool2d`
**kwargs: optional arguments of :mod:`torchvision.models.resnet.ResNet`
"""
return _resnet('resnet34', BasicBlock, [3, 4, 6, 3], pretrained, progress,
imagenet_pretrained, num_classes, lin_features, dropout_prob,
bn_final, concat_pool, **kwargs)
def resnet50(pretrained=False, progress=True, imagenet_pretrained=False, num_classes=1,
lin_features=512, dropout_prob=0.5, bn_final=False, concat_pool=True, **kwargs):
r"""ResNet-50 model for image classification from
`"Deep Residual Learning for Image Recognition" <https://arxiv.org/pdf/1512.03385.pdf>`_
Args:
pretrained (bool, optional): should pretrained parameters be loaded (OpenFire training)
progress (bool, optional): should a progress bar be displayed while downloading pretrained parameters
imagenet_pretrained (bool, optional): should pretrained parameters be loaded on conv layers (ImageNet training)
num_classes (int, optional): number of output classes
lin_features (Union[int, list<int>], optional): number of nodes in intermediate layers of model's head
dropout_prob (float, optional): dropout probability of head FC layers
bn_final (bool, optional): should a batch norm be added after the last layer
concat_pool (bool, optional): should pooling be replaced by :mod:`pyronear.nn.AdaptiveConcatPool2d`
**kwargs: optional arguments of :mod:`torchvision.models.resnet.ResNet`
"""
return _resnet('resnet50', Bottleneck, [3, 4, 6, 3], pretrained, progress,
imagenet_pretrained, num_classes, lin_features, dropout_prob,
bn_final, concat_pool, **kwargs)
def resnet101(pretrained=False, progress=True, imagenet_pretrained=False, num_classes=1,
lin_features=512, dropout_prob=0.5, bn_final=False, concat_pool=True, **kwargs):
r"""ResNet-101 model for image classification from
`"Deep Residual Learning for Image Recognition" <https://arxiv.org/pdf/1512.03385.pdf>`_
Args:
pretrained (bool, optional): should pretrained parameters be loaded (OpenFire training)
progress (bool, optional): should a progress bar be displayed while downloading pretrained parameters
imagenet_pretrained (bool, optional): should pretrained parameters be loaded on conv layers (ImageNet training)
num_classes (int, optional): number of output classes
lin_features (Union[int, List[int]], optional): number of nodes in intermediate layers of model's head
dropout_prob (float, optional): dropout probability of head FC layers
bn_final (bool, optional): should a batch norm be added after the last layer
concat_pool (bool, optional): should pooling be replaced by :mod:`pyronear.nn.AdaptiveConcatPool2d`
**kwargs: optional arguments of :mod:`torchvision.models.resnet.ResNet`
"""
return _resnet('resnet101', Bottleneck, [3, 4, 23, 3], pretrained, progress,
imagenet_pretrained, num_classes, lin_features, dropout_prob,
bn_final, concat_pool, **kwargs)
def resnet152(pretrained=False, progress=True, imagenet_pretrained=False, num_classes=1,
lin_features=512, dropout_prob=0.5, bn_final=False, concat_pool=True, **kwargs):
r"""ResNet-152 model for image classification from
`"Deep Residual Learning for Image Recognition" <https://arxiv.org/pdf/1512.03385.pdf>`_
Args:
pretrained (bool, optional): should pretrained parameters be loaded (OpenFire training)
progress (bool, optional): should a progress bar be displayed while downloading pretrained parameters
imagenet_pretrained (bool, optional): should pretrained parameters be loaded on conv layers (ImageNet training)
num_classes (int, optional): number of output classes
lin_features (Union[int, List[int]], optional): number of nodes in intermediate layers of model's head
dropout_prob (float, optional): dropout probability of head FC layers
bn_final (bool, optional): should a batch norm be added after the last layer
concat_pool (bool, optional): should pooling be replaced by :mod:`pyronear.nn.AdaptiveConcatPool2d`
**kwargs: optional arguments of :mod:`torchvision.models.resnet.ResNet`
"""
return _resnet('resnet152', Bottleneck, [3, 8, 36, 3], pretrained, progress,
imagenet_pretrained, num_classes, lin_features, dropout_prob,
bn_final, concat_pool, **kwargs)
def resnext50_32x4d(pretrained=False, progress=True, imagenet_pretrained=False, num_classes=1,
lin_features=512, dropout_prob=0.5, bn_final=False, concat_pool=True, **kwargs):
r"""ResNeXt-50 32x4d model from
`"Aggregated Residual Transformations for Deep Neural Networks" <https://arxiv.org/pdf/1611.05431.pdf>`_
Args:
pretrained (bool, optional): should pretrained parameters be loaded (OpenFire training)
progress (bool, optional): should a progress bar be displayed while downloading pretrained parameters
imagenet_pretrained (bool, optional): should pretrained parameters be loaded on conv layers (ImageNet training)
num_classes (int, optional): number of output classes
lin_features (Union[int, List[int]], optional): number of nodes in intermediate layers of model's head
dropout_prob (float, optional): dropout probability of head FC layers
bn_final (bool, optional): should a batch norm be added after the last layer
concat_pool (bool, optional): should pooling be replaced by :mod:`pyronear.nn.AdaptiveConcatPool2d`
**kwargs: optional arguments of :mod:`torchvision.models.resnet.ResNet`
"""
kwargs['groups'] = 32
kwargs['width_per_group'] = 4
return _resnet('resnext50_32x4d', Bottleneck, [3, 4, 6, 3], pretrained, progress,
imagenet_pretrained, num_classes, lin_features, dropout_prob,
bn_final, concat_pool, **kwargs)
def resnext101_32x8d(pretrained=False, progress=True, imagenet_pretrained=False, num_classes=1,
lin_features=512, dropout_prob=0.5, bn_final=False, concat_pool=True, **kwargs):
r"""ResNeXt-101 32x8d model from
`"Aggregated Residual Transformations for Deep Neural Networks" <https://arxiv.org/pdf/1611.05431.pdf>`_
Args:
pretrained (bool, optional): should pretrained parameters be loaded (OpenFire training)
progress (bool, optional): should a progress bar be displayed while downloading pretrained parameters
imagenet_pretrained (bool, optional): should pretrained parameters be loaded on conv layers (ImageNet training)
num_classes (int, optional): number of output classes
lin_features (Union[int, List[int]], optional): number of nodes in intermediate layers of model's head
dropout_prob (float, optional): dropout probability of head FC layers
bn_final (bool, optional): should a batch norm be added after the last layer
concat_pool (bool, optional): should pooling be replaced by :mod:`pyronear.nn.AdaptiveConcatPool2d`
**kwargs: optional arguments of :mod:`torchvision.models.resnet.ResNet`
"""
kwargs['groups'] = 32
kwargs['width_per_group'] = 8
return _resnet('resnext101_32x8d', Bottleneck, [3, 4, 23, 3], pretrained, progress,
imagenet_pretrained, num_classes, lin_features, dropout_prob,
bn_final, concat_pool, **kwargs)
def wide_resnet50_2(pretrained=False, progress=True, imagenet_pretrained=False, num_classes=1,
lin_features=512, dropout_prob=0.5, bn_final=False, concat_pool=True, **kwargs):
r"""Wide ResNet-50-2 model from
`"Wide Residual Networks" <https://arxiv.org/pdf/1605.07146.pdf>`_
The model is the same as ResNet except that the number of channels in the
bottleneck is twice as large in every block. The number of channels in the outer
1x1 convolutions is unchanged, e.g. the last block in ResNet-50 has 2048-512-2048
channels, while in Wide ResNet-50-2 it has 2048-1024-2048.
Args:
pretrained (bool, optional): should pretrained parameters be loaded (OpenFire training)
progress (bool, optional): should a progress bar be displayed while downloading pretrained parameters
imagenet_pretrained (bool, optional): should pretrained parameters be loaded on conv layers (ImageNet training)
num_classes (int, optional): number of output classes
lin_features (Union[int, List[int]], optional): number of nodes in intermediate layers of model's head
dropout_prob (float, optional): dropout probability of head FC layers
bn_final (bool, optional): should a batch norm be added after the last layer
concat_pool (bool, optional): should pooling be replaced by :mod:`pyronear.nn.AdaptiveConcatPool2d`
**kwargs: optional arguments of :mod:`torchvision.models.resnet.ResNet`
"""
kwargs['width_per_group'] = 64 * 2
return _resnet('wide_resnet50_2', Bottleneck, [3, 4, 6, 3], pretrained, progress,
imagenet_pretrained, num_classes, lin_features, dropout_prob,
bn_final, concat_pool, **kwargs)
def wide_resnet101_2(pretrained=False, progress=True, imagenet_pretrained=False, num_classes=1,
lin_features=512, dropout_prob=0.5, bn_final=False, concat_pool=True, **kwargs):
r"""Wide ResNet-101-2 model from
`"Wide Residual Networks" <https://arxiv.org/pdf/1605.07146.pdf>`_
The model is the same as ResNet except that the number of channels in the
bottleneck is twice as large in every block. The number of channels in the outer
1x1 convolutions is unchanged, e.g. the last block in ResNet-50 has 2048-512-2048
channels, while in Wide ResNet-50-2 it has 2048-1024-2048.
Args:
pretrained (bool, optional): should pretrained parameters be loaded (OpenFire training)
progress (bool, optional): should a progress bar be displayed while downloading pretrained parameters
imagenet_pretrained (bool, optional): should pretrained parameters be loaded on conv layers (ImageNet training)
num_classes (int, optional): number of output classes
lin_features (Union[int, List[int]], optional): number of nodes in intermediate layers of model's head
dropout_prob (float, optional): dropout probability of head FC layers
bn_final (bool, optional): should a batch norm be added after the last layer
concat_pool (bool, optional): should pooling be replaced by :mod:`pyronear.nn.AdaptiveConcatPool2d`
**kwargs: optional arguments of :mod:`torchvision.models.resnet.ResNet`
"""
kwargs['width_per_group'] = 64 * 2
return _resnet('wide_resnet101_2', Bottleneck, [3, 4, 23, 3], pretrained, progress,
imagenet_pretrained, num_classes, lin_features, dropout_prob,
bn_final, concat_pool, **kwargs)
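To make the constructors above concrete, here is a minimal usage sketch; the import path is an assumption about the package layout, and the expected output shape follows from the default `num_classes=1`.

```python
# Minimal sketch: build the custom-head ResNet-18 defined above and run a
# dummy forward pass. The import path below is assumed, not confirmed.
import torch
from pyronear.models import resnet18  # assumed package layout

model = resnet18(pretrained=False, num_classes=1,
                 lin_features=512, dropout_prob=0.5)
model.eval()

x = torch.rand(1, 3, 224, 224)  # one dummy RGB image
with torch.no_grad():
    out = model(x)
print(out.shape)  # expected: torch.Size([1, 1]), one logit per image
```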
| 61.062745
| 119
| 0.707919
| 2,002
| 15,571
| 5.374625
| 0.098901
| 0.050186
| 0.075279
| 0.04684
| 0.87816
| 0.876301
| 0.871283
| 0.865892
| 0.862268
| 0.862268
| 0
| 0.032001
| 0.203262
| 15,571
| 254
| 120
| 61.30315
| 0.835322
| 0.591292
| 0
| 0.380435
| 0
| 0.021739
| 0.102226
| 0.003596
| 0
| 0
| 0
| 0
| 0
| 1
| 0.108696
| false
| 0
| 0.032609
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
7d59fa6c0cf56fd2a61084b5eb25b162b3b28d14
| 166
|
py
|
Python
|
zb/tools/__init__.py
|
zengbin93/zb
|
f3b7d4542f7ae6eecefbdbfcfb727339ba07d9c4
|
[
"Apache-2.0"
] | 2
|
2018-04-07T12:33:02.000Z
|
2019-11-27T04:41:15.000Z
|
zb/tools/__init__.py
|
zengbin93/zb
|
f3b7d4542f7ae6eecefbdbfcfb727339ba07d9c4
|
[
"Apache-2.0"
] | 1
|
2018-08-12T02:58:59.000Z
|
2018-08-12T02:58:59.000Z
|
zb/tools/__init__.py
|
zengbin93/zb
|
f3b7d4542f7ae6eecefbdbfcfb727339ba07d9c4
|
[
"Apache-2.0"
] | 1
|
2019-11-27T04:41:18.000Z
|
2019-11-27T04:41:18.000Z
|
# -*- coding: utf-8 -*-
from .sms import server_chan_push, bear_push, EmailSender
from .file import read_file, write_file, empty_file
from .file import legitimize
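The re-exports above define the package's public helpers. As a brief sketch, the calls below assume plausible signatures (`legitimize(name)` sanitizing a filename, `write_file(path, content)`, `read_file(path)`); these are illustrative assumptions, not confirmed from the zb source.

```python
# Hypothetical usage of the re-exported helpers; signatures are assumed.
from zb.tools import legitimize, write_file, read_file

safe_name = legitimize("draft: notes?.txt")  # assumed to sanitize the filename
write_file(safe_name, "hello")               # assumed (path, content) argument order
print(read_file(safe_name))
```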
| 20.75
| 57
| 0.759036
| 25
| 166
| 4.8
| 0.64
| 0.133333
| 0.233333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007042
| 0.144578
| 166
| 7
| 58
| 23.714286
| 0.838028
| 0.126506
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
7d5c1d7bdf38cf66bc348fc9c0f081cfe7d5a6e3
| 288
|
py
|
Python
|
src/web/modules/finance/models/__init__.py
|
petr-kalinin/SIStema
|
80e8e637ddddab3cf634f3a3cd7492f122e1e1d3
|
[
"MIT"
] | null | null | null |
src/web/modules/finance/models/__init__.py
|
petr-kalinin/SIStema
|
80e8e637ddddab3cf634f3a3cd7492f122e1e1d3
|
[
"MIT"
] | null | null | null |
src/web/modules/finance/models/__init__.py
|
petr-kalinin/SIStema
|
80e8e637ddddab3cf634f3a3cd7492f122e1e1d3
|
[
"MIT"
] | null | null | null |
from modules.finance.models.discount import Discount
from modules.finance.models.payment_amount import PaymentAmount
from modules.finance.models.documents import *
from modules.finance.questionnaire.blocks import PaymentInfoQuestionnaireBlock
from modules.finance.entrance.steps import *
| 48
| 78
| 0.871528
| 34
| 288
| 7.352941
| 0.441176
| 0.22
| 0.36
| 0.288
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.069444
| 288
| 5
| 79
| 57.6
| 0.932836
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
7d5f7af60978307e640a41b3935b5b86aec705e3
| 163
|
py
|
Python
|
Code/instr_tester/ui.py
|
DaveSeidel/QB_Nebulae_V2
|
4a0218bb6a05e835e74b126729a1c3cd221fc9b5
|
[
"MIT"
] | 40
|
2019-12-30T03:44:36.000Z
|
2022-02-07T23:09:42.000Z
|
Code/instr_tester/ui.py
|
alex-thibodeau/QB_Nebulae_V2
|
34bcf341ea8eddaa9f9ce2e7c2d2438e00e50f54
|
[
"MIT"
] | 11
|
2020-03-08T10:22:57.000Z
|
2022-03-22T21:18:32.000Z
|
Code/instr_tester/ui.py
|
alex-thibodeau/QB_Nebulae_V2
|
34bcf341ea8eddaa9f9ce2e7c2d2438e00e50f54
|
[
"MIT"
] | 23
|
2020-01-20T11:12:20.000Z
|
2022-03-02T20:39:09.000Z
|
import time
class UserInterface(object):
def __init__(self, controlhandler):
self.controlhandler = controlhandler
def update(self):
pass
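A tiny wiring sketch for the class above: a control handler is injected at construction and `update()` is polled from the main loop. `DummyControlHandler` is a hypothetical stand-in, not part of the file.

```python
# Illustrative wiring; DummyControlHandler is a stand-in for the real handler.
class DummyControlHandler(object):
    pass

ui = UserInterface(DummyControlHandler())
ui.update()  # currently a no-op; a concrete UI would do per-frame work here
```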
| 18.111111
| 44
| 0.687117
| 16
| 163
| 6.75
| 0.6875
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.239264
| 163
| 8
| 45
| 20.375
| 0.870968
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.166667
| 0.166667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 6
|
7d76ebf919a2aa7394fa0d657ad7beac4aea6e63
| 13,336
|
py
|
Python
|
sdk/python/pulumi_scaleway/instance_security_group_rules.py
|
stack72/pulumi-scaleway
|
0242d1f058046f86fe4ea6f106872ecd08d10c3b
|
[
"ECL-2.0",
"Apache-2.0"
] | 6
|
2020-10-16T09:09:05.000Z
|
2022-03-24T21:32:17.000Z
|
sdk/python/pulumi_scaleway/instance_security_group_rules.py
|
stack72/pulumi-scaleway
|
0242d1f058046f86fe4ea6f106872ecd08d10c3b
|
[
"ECL-2.0",
"Apache-2.0"
] | 34
|
2020-10-29T17:38:13.000Z
|
2022-03-31T13:33:47.000Z
|
sdk/python/pulumi_scaleway/instance_security_group_rules.py
|
stack72/pulumi-scaleway
|
0242d1f058046f86fe4ea6f106872ecd08d10c3b
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2022-01-13T18:46:32.000Z
|
2022-02-28T03:58:36.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
from . import outputs
from ._inputs import *
__all__ = ['InstanceSecurityGroupRulesArgs', 'InstanceSecurityGroupRules']
@pulumi.input_type
class InstanceSecurityGroupRulesArgs:
def __init__(__self__, *,
security_group_id: pulumi.Input[str],
inbound_rules: Optional[pulumi.Input[Sequence[pulumi.Input['InstanceSecurityGroupRulesInboundRuleArgs']]]] = None,
outbound_rules: Optional[pulumi.Input[Sequence[pulumi.Input['InstanceSecurityGroupRulesOutboundRuleArgs']]]] = None):
"""
The set of arguments for constructing an InstanceSecurityGroupRules resource.
:param pulumi.Input[str] security_group_id: The ID of the security group.
:param pulumi.Input[Sequence[pulumi.Input['InstanceSecurityGroupRulesInboundRuleArgs']]] inbound_rules: A list of inbound rules to add to the security group. (Structure is documented below.)
:param pulumi.Input[Sequence[pulumi.Input['InstanceSecurityGroupRulesOutboundRuleArgs']]] outbound_rules: A list of outbound rules to add to the security group. (Structure is documented below.)
"""
pulumi.set(__self__, "security_group_id", security_group_id)
if inbound_rules is not None:
pulumi.set(__self__, "inbound_rules", inbound_rules)
if outbound_rules is not None:
pulumi.set(__self__, "outbound_rules", outbound_rules)
@property
@pulumi.getter(name="securityGroupId")
def security_group_id(self) -> pulumi.Input[str]:
"""
The ID of the security group.
"""
return pulumi.get(self, "security_group_id")
@security_group_id.setter
def security_group_id(self, value: pulumi.Input[str]):
pulumi.set(self, "security_group_id", value)
@property
@pulumi.getter(name="inboundRules")
def inbound_rules(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['InstanceSecurityGroupRulesInboundRuleArgs']]]]:
"""
A list of inbound rules to add to the security group. (Structure is documented below.)
"""
return pulumi.get(self, "inbound_rules")
@inbound_rules.setter
def inbound_rules(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['InstanceSecurityGroupRulesInboundRuleArgs']]]]):
pulumi.set(self, "inbound_rules", value)
@property
@pulumi.getter(name="outboundRules")
def outbound_rules(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['InstanceSecurityGroupRulesOutboundRuleArgs']]]]:
"""
A list of outbound rules to add to the security group. (Structure is documented below.)
"""
return pulumi.get(self, "outbound_rules")
@outbound_rules.setter
def outbound_rules(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['InstanceSecurityGroupRulesOutboundRuleArgs']]]]):
pulumi.set(self, "outbound_rules", value)
@pulumi.input_type
class _InstanceSecurityGroupRulesState:
def __init__(__self__, *,
inbound_rules: Optional[pulumi.Input[Sequence[pulumi.Input['InstanceSecurityGroupRulesInboundRuleArgs']]]] = None,
outbound_rules: Optional[pulumi.Input[Sequence[pulumi.Input['InstanceSecurityGroupRulesOutboundRuleArgs']]]] = None,
security_group_id: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering InstanceSecurityGroupRules resources.
:param pulumi.Input[Sequence[pulumi.Input['InstanceSecurityGroupRulesInboundRuleArgs']]] inbound_rules: A list of inbound rules to add to the security group. (Structure is documented below.)
:param pulumi.Input[Sequence[pulumi.Input['InstanceSecurityGroupRulesOutboundRuleArgs']]] outbound_rules: A list of outbound rules to add to the security group. (Structure is documented below.)
:param pulumi.Input[str] security_group_id: The ID of the security group.
"""
if inbound_rules is not None:
pulumi.set(__self__, "inbound_rules", inbound_rules)
if outbound_rules is not None:
pulumi.set(__self__, "outbound_rules", outbound_rules)
if security_group_id is not None:
pulumi.set(__self__, "security_group_id", security_group_id)
@property
@pulumi.getter(name="inboundRules")
def inbound_rules(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['InstanceSecurityGroupRulesInboundRuleArgs']]]]:
"""
A list of inbound rules to add to the security group. (Structure is documented below.)
"""
return pulumi.get(self, "inbound_rules")
@inbound_rules.setter
def inbound_rules(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['InstanceSecurityGroupRulesInboundRuleArgs']]]]):
pulumi.set(self, "inbound_rules", value)
@property
@pulumi.getter(name="outboundRules")
def outbound_rules(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['InstanceSecurityGroupRulesOutboundRuleArgs']]]]:
"""
A list of outbound rules to add to the security group. (Structure is documented below.)
"""
return pulumi.get(self, "outbound_rules")
@outbound_rules.setter
def outbound_rules(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['InstanceSecurityGroupRulesOutboundRuleArgs']]]]):
pulumi.set(self, "outbound_rules", value)
@property
@pulumi.getter(name="securityGroupId")
def security_group_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the security group.
"""
return pulumi.get(self, "security_group_id")
@security_group_id.setter
def security_group_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "security_group_id", value)
class InstanceSecurityGroupRules(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
inbound_rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceSecurityGroupRulesInboundRuleArgs']]]]] = None,
outbound_rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceSecurityGroupRulesOutboundRuleArgs']]]]] = None,
security_group_id: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
## Import
Instance security group rules can be imported using the `{zone}/{id}` format, e.g.:
```sh
$ pulumi import scaleway:index/instanceSecurityGroupRules:InstanceSecurityGroupRules web fr-par-1/11111111-1111-1111-1111-111111111111
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceSecurityGroupRulesInboundRuleArgs']]]] inbound_rules: A list of inbound rules to add to the security group. (Structure is documented below.)
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceSecurityGroupRulesOutboundRuleArgs']]]] outbound_rules: A list of outbound rules to add to the security group. (Structure is documented below.)
:param pulumi.Input[str] security_group_id: The ID of the security group.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: InstanceSecurityGroupRulesArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
## Import
Instance security group rules can be imported using the `{zone}/{id}` format, e.g.:
```sh
$ pulumi import scaleway:index/instanceSecurityGroupRules:InstanceSecurityGroupRules web fr-par-1/11111111-1111-1111-1111-111111111111
```
:param str resource_name: The name of the resource.
:param InstanceSecurityGroupRulesArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(InstanceSecurityGroupRulesArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
inbound_rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceSecurityGroupRulesInboundRuleArgs']]]]] = None,
outbound_rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceSecurityGroupRulesOutboundRuleArgs']]]]] = None,
security_group_id: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = InstanceSecurityGroupRulesArgs.__new__(InstanceSecurityGroupRulesArgs)
__props__.__dict__["inbound_rules"] = inbound_rules
__props__.__dict__["outbound_rules"] = outbound_rules
if security_group_id is None and not opts.urn:
raise TypeError("Missing required property 'security_group_id'")
__props__.__dict__["security_group_id"] = security_group_id
super(InstanceSecurityGroupRules, __self__).__init__(
'scaleway:index/instanceSecurityGroupRules:InstanceSecurityGroupRules',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
inbound_rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceSecurityGroupRulesInboundRuleArgs']]]]] = None,
outbound_rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceSecurityGroupRulesOutboundRuleArgs']]]]] = None,
security_group_id: Optional[pulumi.Input[str]] = None) -> 'InstanceSecurityGroupRules':
"""
Get an existing InstanceSecurityGroupRules resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceSecurityGroupRulesInboundRuleArgs']]]] inbound_rules: A list of inbound rules to add to the security group. (Structure is documented below.)
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceSecurityGroupRulesOutboundRuleArgs']]]] outbound_rules: A list of outbound rules to add to the security group. (Structure is documented below.)
:param pulumi.Input[str] security_group_id: The ID of the security group.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _InstanceSecurityGroupRulesState.__new__(_InstanceSecurityGroupRulesState)
__props__.__dict__["inbound_rules"] = inbound_rules
__props__.__dict__["outbound_rules"] = outbound_rules
__props__.__dict__["security_group_id"] = security_group_id
return InstanceSecurityGroupRules(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="inboundRules")
def inbound_rules(self) -> pulumi.Output[Optional[Sequence['outputs.InstanceSecurityGroupRulesInboundRule']]]:
"""
A list of inbound rules to add to the security group. (Structure is documented below.)
"""
return pulumi.get(self, "inbound_rules")
@property
@pulumi.getter(name="outboundRules")
def outbound_rules(self) -> pulumi.Output[Optional[Sequence['outputs.InstanceSecurityGroupRulesOutboundRule']]]:
"""
A list of outbound rules to add to the security group. (Structure is documented below.)
"""
return pulumi.get(self, "outbound_rules")
@property
@pulumi.getter(name="securityGroupId")
def security_group_id(self) -> pulumi.Output[str]:
"""
The ID of the security group.
"""
return pulumi.get(self, "security_group_id")
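To complement the import example in the docstrings above, here is a sketch of creating the resource from scratch. The companion `InstanceSecurityGroup` arguments and the inbound-rule field names (`action`, `port`, `ip_range`) are assumptions modeled on the Scaleway provider's schema, not confirmed from this file.

```python
# Hypothetical Pulumi program using InstanceSecurityGroupRules; the rule
# argument field names are assumed from the provider schema.
import pulumi
import pulumi_scaleway as scaleway

sg = scaleway.InstanceSecurityGroup("web-sg",
    external_rules=True)  # assumption: rules are managed by the Rules resource

rules = scaleway.InstanceSecurityGroupRules("web-rules",
    security_group_id=sg.id,
    inbound_rules=[scaleway.InstanceSecurityGroupRulesInboundRuleArgs(
        action="accept",
        port=443,
        ip_range="0.0.0.0/0",
    )])

pulumi.export("sgId", sg.id)
```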
| 51.891051
| 218
| 0.697885
| 1,423
| 13,336
| 6.298665
| 0.112439
| 0.084681
| 0.053554
| 0.07252
| 0.7676
| 0.750307
| 0.749526
| 0.732902
| 0.718509
| 0.692179
| 0
| 0.006322
| 0.205309
| 13,336
| 256
| 219
| 52.09375
| 0.839404
| 0.30144
| 0
| 0.619048
| 1
| 0
| 0.194793
| 0.112811
| 0
| 0
| 0
| 0
| 0
| 1
| 0.14966
| false
| 0.006803
| 0.047619
| 0
| 0.285714
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
7db7edd52f8fab919994fc652ca5cda9cd561a7a
| 69
|
py
|
Python
|
tests/test_boilerplate.py
|
alexbahnisch/python-boilerplate
|
e836158ed911b591c303d32171be4739b77b9e34
|
[
"MIT"
] | 1
|
2017-07-07T20:31:28.000Z
|
2017-07-07T20:31:28.000Z
|
tests/test_boilerplate.py
|
alexbahnisch/python3-boilerplate
|
e836158ed911b591c303d32171be4739b77b9e34
|
[
"MIT"
] | null | null | null |
tests/test_boilerplate.py
|
alexbahnisch/python3-boilerplate
|
e836158ed911b591c303d32171be4739b77b9e34
|
[
"MIT"
] | null | null | null |
from boilerplate import fun
def test_fun():
assert fun(1) == 1
| 11.5
| 27
| 0.666667
| 11
| 69
| 4.090909
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037736
| 0.231884
| 69
| 5
| 28
| 13.8
| 0.811321
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
7db843f48cbb72ee5da6a7cb3f4c0c22515935cb
| 203
|
py
|
Python
|
src/auctionsite/main.py
|
Hbattle20/djangoauctionsite
|
5cf929800403e103640be54ab52345d9a0a28220
|
[
"MIT"
] | null | null | null |
src/auctionsite/main.py
|
Hbattle20/djangoauctionsite
|
5cf929800403e103640be54ab52345d9a0a28220
|
[
"MIT"
] | null | null | null |
src/auctionsite/main.py
|
Hbattle20/djangoauctionsite
|
5cf929800403e103640be54ab52345d9a0a28220
|
[
"MIT"
] | null | null | null |
def get_price(auction_id):
    # Stub: reports a current price of 0 for every auction.
    return 0
def make_bid(bid, auction_id):
    auction_price = get_price(auction_id)
    if bid <= auction_price:
        # A bid must strictly exceed the current price to be accepted.
        return False
    # Note: this rebinds a local only; persisting the new price is not implemented.
    auction_price = bid
    return True
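To pin down the accept/reject behavior of the stub as written (with `get_price` always returning 0):

```python
# Behavior of the stub as written: any bid above the stubbed price of 0 passes.
assert make_bid(10, auction_id=1) is True   # 10 > 0, accepted
assert make_bid(0, auction_id=1) is False   # 0 <= 0, rejected
```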
| 22.555556
| 41
| 0.689655
| 30
| 203
| 4.366667
| 0.4
| 0.206107
| 0.229008
| 0.259542
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006494
| 0.241379
| 203
| 9
| 42
| 22.555556
| 0.844156
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0.125
| 0.625
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 6
|
7debd998da7331ae40399b73d9662e9c7ca9ee45
| 107
|
py
|
Python
|
src/winnow/models/process.py
|
opendesk/schema
|
9c6d8483abbfeb5e3bd225b2916c212e2c21872b
|
[
"Unlicense"
] | 3
|
2016-04-05T16:51:27.000Z
|
2016-08-30T20:56:25.000Z
|
src/winnow/models/process.py
|
opendesk/schema
|
9c6d8483abbfeb5e3bd225b2916c212e2c21872b
|
[
"Unlicense"
] | null | null | null |
src/winnow/models/process.py
|
opendesk/schema
|
9c6d8483abbfeb5e3bd225b2916c212e2c21872b
|
[
"Unlicense"
] | 1
|
2019-08-14T16:51:56.000Z
|
2019-08-14T16:51:56.000Z
|
import winnow
from winnow.models.base import WinnowVersion
class WinnowProcess(WinnowVersion):
pass
| 13.375
| 44
| 0.803738
| 12
| 107
| 7.166667
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.149533
| 107
| 7
| 45
| 15.285714
| 0.945055
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
8161c86daa0145dcdb266b0069018cf1111c3e07
| 19
|
py
|
Python
|
space/old/units.py
|
mobarski/sandbox
|
64ac79143750d5dcbd4d0f3abdab6efeb9bdf50c
|
[
"MIT"
] | null | null | null |
space/old/units.py
|
mobarski/sandbox
|
64ac79143750d5dcbd4d0f3abdab6efeb9bdf50c
|
[
"MIT"
] | null | null | null |
space/old/units.py
|
mobarski/sandbox
|
64ac79143750d5dcbd4d0f3abdab6efeb9bdf50c
|
[
"MIT"
] | null | null | null |
u = 'kg * m / s**2'
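The string is the SI expansion of a newton. As an illustrative assumption about intended use, a units library such as `pint` can evaluate it; the file itself does not do this.

```python
# Hypothetical: evaluating the unit string with pint (not used by the file).
import pint

ureg = pint.UnitRegistry()
q = ureg("kg * m / s**2")    # parse the expression from the file
print(q.to("newton"))        # 1.0 newton
```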
| 19
| 19
| 0.315789
| 5
| 19
| 1.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 0.315789
| 19
| 1
| 19
| 19
| 0.384615
| 0
| 0
| 0
| 0
| 0
| 0.65
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
81639b94b78faa5edd9bcd71e5b23d4c766e1ef9
| 236
|
py
|
Python
|
angr/analyses/decompiler/structured_codegen/__init__.py
|
mikenawrocki/angr
|
57f5593e902f5ad58709bc8f4ce7859134300ffb
|
[
"BSD-2-Clause"
] | 1
|
2021-12-20T14:48:33.000Z
|
2021-12-20T14:48:33.000Z
|
angr/analyses/decompiler/structured_codegen/__init__.py
|
mikenawrocki/angr
|
57f5593e902f5ad58709bc8f4ce7859134300ffb
|
[
"BSD-2-Clause"
] | null | null | null |
angr/analyses/decompiler/structured_codegen/__init__.py
|
mikenawrocki/angr
|
57f5593e902f5ad58709bc8f4ce7859134300ffb
|
[
"BSD-2-Clause"
] | null | null | null |
from .base import BaseStructuredCodeGenerator, InstructionMapping, InstructionMappingElement, PositionMappingElement,\
PositionMapping
from .c import CStructuredCodeGenerator
from .dwarf_import import ImportSourceCode
| 47.2
| 118
| 0.822034
| 17
| 236
| 11.352941
| 0.705882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144068
| 236
| 4
| 119
| 59
| 0.955446
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c499bbcdafc7951d9d7c03c4ab1daa97f311d39a
| 21
|
py
|
Python
|
fzfaws/ec2/__init__.py
|
kazhala/fawsf
|
4abefb2301f7b489b11ed3f0b303faafa5941d5b
|
[
"MIT"
] | 66
|
2020-07-26T12:43:30.000Z
|
2022-01-25T12:09:40.000Z
|
fzfaws/ec2/__init__.py
|
kazhala/fawsf
|
4abefb2301f7b489b11ed3f0b303faafa5941d5b
|
[
"MIT"
] | null | null | null |
fzfaws/ec2/__init__.py
|
kazhala/fawsf
|
4abefb2301f7b489b11ed3f0b303faafa5941d5b
|
[
"MIT"
] | 3
|
2020-07-26T22:09:45.000Z
|
2020-07-28T01:09:26.000Z
|
from .ec2 import EC2
| 10.5
| 20
| 0.761905
| 4
| 21
| 4
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 0.190476
| 21
| 1
| 21
| 21
| 0.823529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c4b15916ba3b8fb40a1e33fef2d39410c93af2e6
| 70,178
|
py
|
Python
|
cottonformation/res/codedeploy.py
|
MacHu-GWU/cottonformation-project
|
23e28c08cfb5a7cc0db6dbfdb1d7e1585c773f3b
|
[
"BSD-2-Clause"
] | 5
|
2021-07-22T03:45:59.000Z
|
2021-12-17T21:07:14.000Z
|
cottonformation/res/codedeploy.py
|
MacHu-GWU/cottonformation-project
|
23e28c08cfb5a7cc0db6dbfdb1d7e1585c773f3b
|
[
"BSD-2-Clause"
] | 1
|
2021-06-25T18:01:31.000Z
|
2021-06-25T18:01:31.000Z
|
cottonformation/res/codedeploy.py
|
MacHu-GWU/cottonformation-project
|
23e28c08cfb5a7cc0db6dbfdb1d7e1585c773f3b
|
[
"BSD-2-Clause"
] | 2
|
2021-06-27T03:08:21.000Z
|
2021-06-28T22:15:51.000Z
|
# -*- coding: utf-8 -*-
"""
This module declares CloudFormation Property and Resource classes for the
AWS CodeDeploy service.
"""
import attr
import typing
from ..core.model import (
Property, Resource, Tag, GetAtt, TypeHint, TypeCheck,
)
from ..core.constant import AttrMeta
#--- Property declaration ---
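# Usage sketch (editorial note, not generated output): each Property subclass
# below is an attrs class whose rp_*/p_* fields are validated at construction,
# e.g., with illustrative values:
#
#     canary = PropDeploymentConfigTimeBasedCanary(
#         rp_CanaryInterval=5,
#         rp_CanaryPercentage=10,
#     )
#
# Passing a non-int here raises a TypeError from attr.validators.instance_of.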
@attr.s
class PropDeploymentConfigTimeBasedCanary(Property):
"""
AWS Object Type = "AWS::CodeDeploy::DeploymentConfig.TimeBasedCanary"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentconfig-timebasedcanary.html
Property Document:
- ``rp_CanaryInterval``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentconfig-timebasedcanary.html#cfn-properties-codedeploy-deploymentconfig-trafficroutingconfig-timebasedcanary-canaryinterval
- ``rp_CanaryPercentage``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentconfig-timebasedcanary.html#cfn-properties-codedeploy-deploymentconfig-trafficroutingconfig-timebasedcanary-canarypercentage
"""
AWS_OBJECT_TYPE = "AWS::CodeDeploy::DeploymentConfig.TimeBasedCanary"
rp_CanaryInterval: int = attr.ib(
default=None,
validator=attr.validators.instance_of(int),
metadata={AttrMeta.PROPERTY_NAME: "CanaryInterval"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentconfig-timebasedcanary.html#cfn-properties-codedeploy-deploymentconfig-trafficroutingconfig-timebasedcanary-canaryinterval"""
rp_CanaryPercentage: int = attr.ib(
default=None,
validator=attr.validators.instance_of(int),
metadata={AttrMeta.PROPERTY_NAME: "CanaryPercentage"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentconfig-timebasedcanary.html#cfn-properties-codedeploy-deploymentconfig-trafficroutingconfig-timebasedcanary-canarypercentage"""
@attr.s
class PropDeploymentGroupTargetGroupInfo(Property):
"""
AWS Object Type = "AWS::CodeDeploy::DeploymentGroup.TargetGroupInfo"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-targetgroupinfo.html
Property Document:
- ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-targetgroupinfo.html#cfn-codedeploy-deploymentgroup-targetgroupinfo-name
"""
AWS_OBJECT_TYPE = "AWS::CodeDeploy::DeploymentGroup.TargetGroupInfo"
p_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-targetgroupinfo.html#cfn-codedeploy-deploymentgroup-targetgroupinfo-name"""
@attr.s
class PropDeploymentGroupDeploymentStyle(Property):
"""
AWS Object Type = "AWS::CodeDeploy::DeploymentGroup.DeploymentStyle"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deploymentstyle.html
Property Document:
- ``p_DeploymentOption``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deploymentstyle.html#cfn-codedeploy-deploymentgroup-deploymentstyle-deploymentoption
- ``p_DeploymentType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deploymentstyle.html#cfn-codedeploy-deploymentgroup-deploymentstyle-deploymenttype
"""
AWS_OBJECT_TYPE = "AWS::CodeDeploy::DeploymentGroup.DeploymentStyle"
p_DeploymentOption: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "DeploymentOption"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deploymentstyle.html#cfn-codedeploy-deploymentgroup-deploymentstyle-deploymentoption"""
p_DeploymentType: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "DeploymentType"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deploymentstyle.html#cfn-codedeploy-deploymentgroup-deploymentstyle-deploymenttype"""
@attr.s
class PropDeploymentGroupEC2TagFilter(Property):
"""
AWS Object Type = "AWS::CodeDeploy::DeploymentGroup.EC2TagFilter"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-ec2tagfilter.html
Property Document:
- ``p_Key``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-ec2tagfilter.html#cfn-codedeploy-deploymentgroup-ec2tagfilter-key
- ``p_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-ec2tagfilter.html#cfn-codedeploy-deploymentgroup-ec2tagfilter-type
- ``p_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-ec2tagfilter.html#cfn-codedeploy-deploymentgroup-ec2tagfilter-value
"""
AWS_OBJECT_TYPE = "AWS::CodeDeploy::DeploymentGroup.EC2TagFilter"
p_Key: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Key"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-ec2tagfilter.html#cfn-codedeploy-deploymentgroup-ec2tagfilter-key"""
p_Type: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Type"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-ec2tagfilter.html#cfn-codedeploy-deploymentgroup-ec2tagfilter-type"""
p_Value: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Value"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-ec2tagfilter.html#cfn-codedeploy-deploymentgroup-ec2tagfilter-value"""
@attr.s
class PropDeploymentGroupTriggerConfig(Property):
"""
AWS Object Type = "AWS::CodeDeploy::DeploymentGroup.TriggerConfig"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-triggerconfig.html
Property Document:
- ``p_TriggerEvents``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-triggerconfig.html#cfn-codedeploy-deploymentgroup-triggerconfig-triggerevents
- ``p_TriggerName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-triggerconfig.html#cfn-codedeploy-deploymentgroup-triggerconfig-triggername
- ``p_TriggerTargetArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-triggerconfig.html#cfn-codedeploy-deploymentgroup-triggerconfig-triggertargetarn
"""
AWS_OBJECT_TYPE = "AWS::CodeDeploy::DeploymentGroup.TriggerConfig"
p_TriggerEvents: typing.List[TypeHint.intrinsic_str] = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "TriggerEvents"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-triggerconfig.html#cfn-codedeploy-deploymentgroup-triggerconfig-triggerevents"""
p_TriggerName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "TriggerName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-triggerconfig.html#cfn-codedeploy-deploymentgroup-triggerconfig-triggername"""
p_TriggerTargetArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "TriggerTargetArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-triggerconfig.html#cfn-codedeploy-deploymentgroup-triggerconfig-triggertargetarn"""
@attr.s
class PropDeploymentGroupDeploymentReadyOption(Property):
"""
AWS Object Type = "AWS::CodeDeploy::DeploymentGroup.DeploymentReadyOption"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deploymentreadyoption.html
Property Document:
- ``p_ActionOnTimeout``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deploymentreadyoption.html#cfn-codedeploy-deploymentgroup-bluegreendeploymentconfiguration-deploymentreadyoption-actionontimeout
- ``p_WaitTimeInMinutes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deploymentreadyoption.html#cfn-codedeploy-deploymentgroup-bluegreendeploymentconfiguration-deploymentreadyoption-waittimeinminutes
"""
AWS_OBJECT_TYPE = "AWS::CodeDeploy::DeploymentGroup.DeploymentReadyOption"
p_ActionOnTimeout: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "ActionOnTimeout"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deploymentreadyoption.html#cfn-codedeploy-deploymentgroup-bluegreendeploymentconfiguration-deploymentreadyoption-actionontimeout"""
p_WaitTimeInMinutes: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
metadata={AttrMeta.PROPERTY_NAME: "WaitTimeInMinutes"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deploymentreadyoption.html#cfn-codedeploy-deploymentgroup-bluegreendeploymentconfiguration-deploymentreadyoption-waittimeinminutes"""
@attr.s
class PropDeploymentConfigMinimumHealthyHosts(Property):
"""
AWS Object Type = "AWS::CodeDeploy::DeploymentConfig.MinimumHealthyHosts"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentconfig-minimumhealthyhosts.html
Property Document:
- ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentconfig-minimumhealthyhosts.html#cfn-codedeploy-deploymentconfig-minimumhealthyhosts-type
- ``rp_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentconfig-minimumhealthyhosts.html#cfn-codedeploy-deploymentconfig-minimumhealthyhosts-value
"""
AWS_OBJECT_TYPE = "AWS::CodeDeploy::DeploymentConfig.MinimumHealthyHosts"
rp_Type: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Type"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentconfig-minimumhealthyhosts.html#cfn-codedeploy-deploymentconfig-minimumhealthyhosts-type"""
rp_Value: int = attr.ib(
default=None,
validator=attr.validators.instance_of(int),
metadata={AttrMeta.PROPERTY_NAME: "Value"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentconfig-minimumhealthyhosts.html#cfn-codedeploy-deploymentconfig-minimumhealthyhosts-value"""
@attr.s
class PropDeploymentGroupAutoRollbackConfiguration(Property):
"""
AWS Object Type = "AWS::CodeDeploy::DeploymentGroup.AutoRollbackConfiguration"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-autorollbackconfiguration.html
Property Document:
- ``p_Enabled``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-autorollbackconfiguration.html#cfn-codedeploy-deploymentgroup-autorollbackconfiguration-enabled
- ``p_Events``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-autorollbackconfiguration.html#cfn-codedeploy-deploymentgroup-autorollbackconfiguration-events
"""
AWS_OBJECT_TYPE = "AWS::CodeDeploy::DeploymentGroup.AutoRollbackConfiguration"
p_Enabled: bool = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(bool)),
metadata={AttrMeta.PROPERTY_NAME: "Enabled"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-autorollbackconfiguration.html#cfn-codedeploy-deploymentgroup-autorollbackconfiguration-enabled"""
p_Events: typing.List[TypeHint.intrinsic_str] = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "Events"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-autorollbackconfiguration.html#cfn-codedeploy-deploymentgroup-autorollbackconfiguration-events"""
@attr.s
class PropDeploymentGroupS3Location(Property):
"""
AWS Object Type = "AWS::CodeDeploy::DeploymentGroup.S3Location"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deployment-revision-s3location.html
Property Document:
- ``rp_Bucket``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deployment-revision-s3location.html#cfn-properties-codedeploy-deploymentgroup-deployment-revision-s3location-bucket
- ``rp_Key``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deployment-revision-s3location.html#cfn-properties-codedeploy-deploymentgroup-deployment-revision-s3location-key
- ``p_BundleType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deployment-revision-s3location.html#cfn-properties-codedeploy-deploymentgroup-deployment-revision-s3location-bundletype
- ``p_ETag``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deployment-revision-s3location.html#cfn-properties-codedeploy-deploymentgroup-deployment-revision-s3location-etag
- ``p_Version``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deployment-revision-s3location.html#cfn-properties-codedeploy-deploymentgroup-deployment-revision-s3location-value
"""
AWS_OBJECT_TYPE = "AWS::CodeDeploy::DeploymentGroup.S3Location"
rp_Bucket: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Bucket"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deployment-revision-s3location.html#cfn-properties-codedeploy-deploymentgroup-deployment-revision-s3location-bucket"""
rp_Key: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Key"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deployment-revision-s3location.html#cfn-properties-codedeploy-deploymentgroup-deployment-revision-s3location-key"""
p_BundleType: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "BundleType"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deployment-revision-s3location.html#cfn-properties-codedeploy-deploymentgroup-deployment-revision-s3location-bundletype"""
p_ETag: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "ETag"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deployment-revision-s3location.html#cfn-properties-codedeploy-deploymentgroup-deployment-revision-s3location-etag"""
p_Version: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Version"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deployment-revision-s3location.html#cfn-properties-codedeploy-deploymentgroup-deployment-revision-s3location-value"""
@attr.s
class PropDeploymentGroupBlueInstanceTerminationOption(Property):
"""
AWS Object Type = "AWS::CodeDeploy::DeploymentGroup.BlueInstanceTerminationOption"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-blueinstanceterminationoption.html
Property Document:
- ``p_Action``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-blueinstanceterminationoption.html#cfn-codedeploy-deploymentgroup-bluegreendeploymentconfiguration-blueinstanceterminationoption-action
- ``p_TerminationWaitTimeInMinutes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-blueinstanceterminationoption.html#cfn-codedeploy-deploymentgroup-bluegreendeploymentconfiguration-blueinstanceterminationoption-terminationwaittimeinminutes
"""
AWS_OBJECT_TYPE = "AWS::CodeDeploy::DeploymentGroup.BlueInstanceTerminationOption"
p_Action: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Action"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-blueinstanceterminationoption.html#cfn-codedeploy-deploymentgroup-bluegreendeploymentconfiguration-blueinstanceterminationoption-action"""
p_TerminationWaitTimeInMinutes: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
metadata={AttrMeta.PROPERTY_NAME: "TerminationWaitTimeInMinutes"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-blueinstanceterminationoption.html#cfn-codedeploy-deploymentgroup-bluegreendeploymentconfiguration-blueinstanceterminationoption-terminationwaittimeinminutes"""
@attr.s
class PropDeploymentGroupTagFilter(Property):
"""
AWS Object Type = "AWS::CodeDeploy::DeploymentGroup.TagFilter"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-tagfilter.html
Property Document:
- ``p_Key``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-tagfilter.html#cfn-codedeploy-deploymentgroup-tagfilter-key
- ``p_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-tagfilter.html#cfn-codedeploy-deploymentgroup-tagfilter-type
- ``p_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-tagfilter.html#cfn-codedeploy-deploymentgroup-tagfilter-value
"""
AWS_OBJECT_TYPE = "AWS::CodeDeploy::DeploymentGroup.TagFilter"
p_Key: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Key"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-tagfilter.html#cfn-codedeploy-deploymentgroup-tagfilter-key"""
p_Type: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Type"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-tagfilter.html#cfn-codedeploy-deploymentgroup-tagfilter-type"""
p_Value: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Value"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-tagfilter.html#cfn-codedeploy-deploymentgroup-tagfilter-value"""
@attr.s
class PropDeploymentGroupGitHubLocation(Property):
"""
AWS Object Type = "AWS::CodeDeploy::DeploymentGroup.GitHubLocation"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deployment-revision-githublocation.html
Property Document:
- ``rp_CommitId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deployment-revision-githublocation.html#cfn-properties-codedeploy-deploymentgroup-deployment-revision-githublocation-commitid
- ``rp_Repository``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deployment-revision-githublocation.html#cfn-properties-codedeploy-deploymentgroup-deployment-revision-githublocation-repository
"""
AWS_OBJECT_TYPE = "AWS::CodeDeploy::DeploymentGroup.GitHubLocation"
rp_CommitId: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "CommitId"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deployment-revision-githublocation.html#cfn-properties-codedeploy-deploymentgroup-deployment-revision-githublocation-commitid"""
rp_Repository: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Repository"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deployment-revision-githublocation.html#cfn-properties-codedeploy-deploymentgroup-deployment-revision-githublocation-repository"""
@attr.s
class PropDeploymentGroupELBInfo(Property):
"""
AWS Object Type = "AWS::CodeDeploy::DeploymentGroup.ELBInfo"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-elbinfo.html
Property Document:
- ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-elbinfo.html#cfn-codedeploy-deploymentgroup-elbinfo-name
"""
AWS_OBJECT_TYPE = "AWS::CodeDeploy::DeploymentGroup.ELBInfo"
p_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-elbinfo.html#cfn-codedeploy-deploymentgroup-elbinfo-name"""
@attr.s
class PropDeploymentGroupAlarm(Property):
"""
AWS Object Type = "AWS::CodeDeploy::DeploymentGroup.Alarm"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-alarm.html
Property Document:
- ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-alarm.html#cfn-codedeploy-deploymentgroup-alarm-name
"""
AWS_OBJECT_TYPE = "AWS::CodeDeploy::DeploymentGroup.Alarm"
p_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-alarm.html#cfn-codedeploy-deploymentgroup-alarm-name"""
@attr.s
class PropDeploymentConfigTimeBasedLinear(Property):
"""
AWS Object Type = "AWS::CodeDeploy::DeploymentConfig.TimeBasedLinear"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentconfig-timebasedlinear.html
Property Document:
- ``rp_LinearInterval``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentconfig-timebasedlinear.html#cfn-properties-codedeploy-deploymentconfig-trafficroutingconfig-timebasedlinear-linearinterval
- ``rp_LinearPercentage``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentconfig-timebasedlinear.html#cfn-properties-codedeploy-deploymentconfig-trafficroutingconfig-timebasedlinear-linearpercentage
"""
AWS_OBJECT_TYPE = "AWS::CodeDeploy::DeploymentConfig.TimeBasedLinear"
rp_LinearInterval: int = attr.ib(
default=None,
validator=attr.validators.instance_of(int),
metadata={AttrMeta.PROPERTY_NAME: "LinearInterval"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentconfig-timebasedlinear.html#cfn-properties-codedeploy-deploymentconfig-trafficroutingconfig-timebasedlinear-linearinterval"""
rp_LinearPercentage: int = attr.ib(
default=None,
validator=attr.validators.instance_of(int),
metadata={AttrMeta.PROPERTY_NAME: "LinearPercentage"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentconfig-timebasedlinear.html#cfn-properties-codedeploy-deploymentconfig-trafficroutingconfig-timebasedlinear-linearpercentage"""
@attr.s
class PropDeploymentGroupGreenFleetProvisioningOption(Property):
"""
AWS Object Type = "AWS::CodeDeploy::DeploymentGroup.GreenFleetProvisioningOption"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-greenfleetprovisioningoption.html
Property Document:
- ``p_Action``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-greenfleetprovisioningoption.html#cfn-codedeploy-deploymentgroup-bluegreendeploymentconfiguration-greenfleetprovisioningoption-action
"""
AWS_OBJECT_TYPE = "AWS::CodeDeploy::DeploymentGroup.GreenFleetProvisioningOption"
p_Action: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Action"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-greenfleetprovisioningoption.html#cfn-codedeploy-deploymentgroup-bluegreendeploymentconfiguration-greenfleetprovisioningoption-action"""
@attr.s
class PropDeploymentGroupECSService(Property):
"""
AWS Object Type = "AWS::CodeDeploy::DeploymentGroup.ECSService"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-ecsservice.html
Property Document:
- ``rp_ClusterName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-ecsservice.html#cfn-codedeploy-deploymentgroup-ecsservice-clustername
- ``rp_ServiceName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-ecsservice.html#cfn-codedeploy-deploymentgroup-ecsservice-servicename
"""
AWS_OBJECT_TYPE = "AWS::CodeDeploy::DeploymentGroup.ECSService"
rp_ClusterName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "ClusterName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-ecsservice.html#cfn-codedeploy-deploymentgroup-ecsservice-clustername"""
rp_ServiceName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "ServiceName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-ecsservice.html#cfn-codedeploy-deploymentgroup-ecsservice-servicename"""
@attr.s
class PropDeploymentGroupEC2TagSetListObject(Property):
"""
AWS Object Type = "AWS::CodeDeploy::DeploymentGroup.EC2TagSetListObject"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-ec2tagsetlistobject.html
Property Document:
- ``p_Ec2TagGroup``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-ec2tagsetlistobject.html#cfn-codedeploy-deploymentgroup-ec2tagsetlistobject-ec2taggroup
"""
AWS_OBJECT_TYPE = "AWS::CodeDeploy::DeploymentGroup.EC2TagSetListObject"
p_Ec2TagGroup: typing.List[typing.Union['PropDeploymentGroupEC2TagFilter', dict]] = attr.ib(
default=None,
converter=PropDeploymentGroupEC2TagFilter.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropDeploymentGroupEC2TagFilter), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "Ec2TagGroup"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-ec2tagsetlistobject.html#cfn-codedeploy-deploymentgroup-ec2tagsetlistobject-ec2taggroup"""
@attr.s
class PropDeploymentGroupOnPremisesTagSetListObject(Property):
"""
AWS Object Type = "AWS::CodeDeploy::DeploymentGroup.OnPremisesTagSetListObject"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-onpremisestagsetlistobject.html
Property Document:
- ``p_OnPremisesTagGroup``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-onpremisestagsetlistobject.html#cfn-codedeploy-deploymentgroup-onpremisestagsetlistobject-onpremisestaggroup
"""
AWS_OBJECT_TYPE = "AWS::CodeDeploy::DeploymentGroup.OnPremisesTagSetListObject"
p_OnPremisesTagGroup: typing.List[typing.Union['PropDeploymentGroupTagFilter', dict]] = attr.ib(
default=None,
converter=PropDeploymentGroupTagFilter.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropDeploymentGroupTagFilter), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "OnPremisesTagGroup"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-onpremisestagsetlistobject.html#cfn-codedeploy-deploymentgroup-onpremisestagsetlistobject-onpremisestaggroup"""
@attr.s
class PropDeploymentGroupOnPremisesTagSet(Property):
"""
AWS Object Type = "AWS::CodeDeploy::DeploymentGroup.OnPremisesTagSet"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-onpremisestagset.html
Property Document:
- ``p_OnPremisesTagSetList``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-onpremisestagset.html#cfn-codedeploy-deploymentgroup-onpremisestagset-onpremisestagsetlist
"""
AWS_OBJECT_TYPE = "AWS::CodeDeploy::DeploymentGroup.OnPremisesTagSet"
p_OnPremisesTagSetList: typing.List[typing.Union['PropDeploymentGroupOnPremisesTagSetListObject', dict]] = attr.ib(
default=None,
converter=PropDeploymentGroupOnPremisesTagSetListObject.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropDeploymentGroupOnPremisesTagSetListObject), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "OnPremisesTagSetList"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-onpremisestagset.html#cfn-codedeploy-deploymentgroup-onpremisestagset-onpremisestagsetlist"""
@attr.s
class PropDeploymentGroupLoadBalancerInfo(Property):
"""
AWS Object Type = "AWS::CodeDeploy::DeploymentGroup.LoadBalancerInfo"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-loadbalancerinfo.html
Property Document:
- ``p_ElbInfoList``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-loadbalancerinfo.html#cfn-codedeploy-deploymentgroup-loadbalancerinfo-elbinfolist
- ``p_TargetGroupInfoList``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-loadbalancerinfo.html#cfn-codedeploy-deploymentgroup-loadbalancerinfo-targetgroupinfolist
"""
AWS_OBJECT_TYPE = "AWS::CodeDeploy::DeploymentGroup.LoadBalancerInfo"
p_ElbInfoList: typing.List[typing.Union['PropDeploymentGroupELBInfo', dict]] = attr.ib(
default=None,
converter=PropDeploymentGroupELBInfo.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropDeploymentGroupELBInfo), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "ElbInfoList"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-loadbalancerinfo.html#cfn-codedeploy-deploymentgroup-loadbalancerinfo-elbinfolist"""
p_TargetGroupInfoList: typing.List[typing.Union['PropDeploymentGroupTargetGroupInfo', dict]] = attr.ib(
default=None,
converter=PropDeploymentGroupTargetGroupInfo.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropDeploymentGroupTargetGroupInfo), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "TargetGroupInfoList"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-loadbalancerinfo.html#cfn-codedeploy-deploymentgroup-loadbalancerinfo-targetgroupinfolist"""
@attr.s
class PropDeploymentGroupRevisionLocation(Property):
"""
AWS Object Type = "AWS::CodeDeploy::DeploymentGroup.RevisionLocation"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deployment-revision.html
Property Document:
- ``p_GitHubLocation``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deployment-revision.html#cfn-properties-codedeploy-deploymentgroup-deployment-revision-githublocation
- ``p_RevisionType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deployment-revision.html#cfn-properties-codedeploy-deploymentgroup-deployment-revision-revisiontype
- ``p_S3Location``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deployment-revision.html#cfn-properties-codedeploy-deploymentgroup-deployment-revision-s3location
"""
AWS_OBJECT_TYPE = "AWS::CodeDeploy::DeploymentGroup.RevisionLocation"
p_GitHubLocation: typing.Union['PropDeploymentGroupGitHubLocation', dict] = attr.ib(
default=None,
converter=PropDeploymentGroupGitHubLocation.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(PropDeploymentGroupGitHubLocation)),
metadata={AttrMeta.PROPERTY_NAME: "GitHubLocation"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deployment-revision.html#cfn-properties-codedeploy-deploymentgroup-deployment-revision-githublocation"""
p_RevisionType: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "RevisionType"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deployment-revision.html#cfn-properties-codedeploy-deploymentgroup-deployment-revision-revisiontype"""
p_S3Location: typing.Union['PropDeploymentGroupS3Location', dict] = attr.ib(
default=None,
converter=PropDeploymentGroupS3Location.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(PropDeploymentGroupS3Location)),
metadata={AttrMeta.PROPERTY_NAME: "S3Location"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deployment-revision.html#cfn-properties-codedeploy-deploymentgroup-deployment-revision-s3location"""
@attr.s
class PropDeploymentGroupEC2TagSet(Property):
"""
AWS Object Type = "AWS::CodeDeploy::DeploymentGroup.EC2TagSet"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-ec2tagset.html
Property Document:
- ``p_Ec2TagSetList``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-ec2tagset.html#cfn-codedeploy-deploymentgroup-ec2tagset-ec2tagsetlist
"""
AWS_OBJECT_TYPE = "AWS::CodeDeploy::DeploymentGroup.EC2TagSet"
p_Ec2TagSetList: typing.List[typing.Union['PropDeploymentGroupEC2TagSetListObject', dict]] = attr.ib(
default=None,
converter=PropDeploymentGroupEC2TagSetListObject.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropDeploymentGroupEC2TagSetListObject), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "Ec2TagSetList"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-ec2tagset.html#cfn-codedeploy-deploymentgroup-ec2tagset-ec2tagsetlist"""
@attr.s
class PropDeploymentGroupAlarmConfiguration(Property):
"""
AWS Object Type = "AWS::CodeDeploy::DeploymentGroup.AlarmConfiguration"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-alarmconfiguration.html
Property Document:
- ``p_Alarms``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-alarmconfiguration.html#cfn-codedeploy-deploymentgroup-alarmconfiguration-alarms
- ``p_Enabled``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-alarmconfiguration.html#cfn-codedeploy-deploymentgroup-alarmconfiguration-enabled
- ``p_IgnorePollAlarmFailure``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-alarmconfiguration.html#cfn-codedeploy-deploymentgroup-alarmconfiguration-ignorepollalarmfailure
"""
AWS_OBJECT_TYPE = "AWS::CodeDeploy::DeploymentGroup.AlarmConfiguration"
p_Alarms: typing.List[typing.Union['PropDeploymentGroupAlarm', dict]] = attr.ib(
default=None,
converter=PropDeploymentGroupAlarm.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropDeploymentGroupAlarm), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "Alarms"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-alarmconfiguration.html#cfn-codedeploy-deploymentgroup-alarmconfiguration-alarms"""
p_Enabled: bool = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(bool)),
metadata={AttrMeta.PROPERTY_NAME: "Enabled"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-alarmconfiguration.html#cfn-codedeploy-deploymentgroup-alarmconfiguration-enabled"""
p_IgnorePollAlarmFailure: bool = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(bool)),
metadata={AttrMeta.PROPERTY_NAME: "IgnorePollAlarmFailure"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-alarmconfiguration.html#cfn-codedeploy-deploymentgroup-alarmconfiguration-ignorepollalarmfailure"""
@attr.s
class PropDeploymentGroupBlueGreenDeploymentConfiguration(Property):
"""
AWS Object Type = "AWS::CodeDeploy::DeploymentGroup.BlueGreenDeploymentConfiguration"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-bluegreendeploymentconfiguration.html
Property Document:
- ``p_DeploymentReadyOption``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-bluegreendeploymentconfiguration.html#cfn-codedeploy-deploymentgroup-bluegreendeploymentconfiguration-deploymentreadyoption
- ``p_GreenFleetProvisioningOption``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-bluegreendeploymentconfiguration.html#cfn-codedeploy-deploymentgroup-bluegreendeploymentconfiguration-greenfleetprovisioningoption
- ``p_TerminateBlueInstancesOnDeploymentSuccess``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-bluegreendeploymentconfiguration.html#cfn-codedeploy-deploymentgroup-bluegreendeploymentconfiguration-terminateblueinstancesondeploymentsuccess
"""
AWS_OBJECT_TYPE = "AWS::CodeDeploy::DeploymentGroup.BlueGreenDeploymentConfiguration"
p_DeploymentReadyOption: typing.Union['PropDeploymentGroupDeploymentReadyOption', dict] = attr.ib(
default=None,
converter=PropDeploymentGroupDeploymentReadyOption.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(PropDeploymentGroupDeploymentReadyOption)),
metadata={AttrMeta.PROPERTY_NAME: "DeploymentReadyOption"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-bluegreendeploymentconfiguration.html#cfn-codedeploy-deploymentgroup-bluegreendeploymentconfiguration-deploymentreadyoption"""
p_GreenFleetProvisioningOption: typing.Union['PropDeploymentGroupGreenFleetProvisioningOption', dict] = attr.ib(
default=None,
converter=PropDeploymentGroupGreenFleetProvisioningOption.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(PropDeploymentGroupGreenFleetProvisioningOption)),
metadata={AttrMeta.PROPERTY_NAME: "GreenFleetProvisioningOption"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-bluegreendeploymentconfiguration.html#cfn-codedeploy-deploymentgroup-bluegreendeploymentconfiguration-greenfleetprovisioningoption"""
p_TerminateBlueInstancesOnDeploymentSuccess: typing.Union['PropDeploymentGroupBlueInstanceTerminationOption', dict] = attr.ib(
default=None,
converter=PropDeploymentGroupBlueInstanceTerminationOption.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(PropDeploymentGroupBlueInstanceTerminationOption)),
metadata={AttrMeta.PROPERTY_NAME: "TerminateBlueInstancesOnDeploymentSuccess"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-bluegreendeploymentconfiguration.html#cfn-codedeploy-deploymentgroup-bluegreendeploymentconfiguration-terminateblueinstancesondeploymentsuccess"""
@attr.s
class PropDeploymentConfigTrafficRoutingConfig(Property):
"""
AWS Object Type = "AWS::CodeDeploy::DeploymentConfig.TrafficRoutingConfig"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentconfig-trafficroutingconfig.html
Property Document:
- ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentconfig-trafficroutingconfig.html#cfn-properties-codedeploy-deploymentconfig-trafficroutingconfig-type
- ``p_TimeBasedCanary``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentconfig-trafficroutingconfig.html#cfn-properties-codedeploy-deploymentconfig-trafficroutingconfig-timebasedcanary
- ``p_TimeBasedLinear``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentconfig-trafficroutingconfig.html#cfn-properties-codedeploy-deploymentconfig-trafficroutingconfig-timebasedlinear
"""
AWS_OBJECT_TYPE = "AWS::CodeDeploy::DeploymentConfig.TrafficRoutingConfig"
rp_Type: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Type"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentconfig-trafficroutingconfig.html#cfn-properties-codedeploy-deploymentconfig-trafficroutingconfig-type"""
p_TimeBasedCanary: typing.Union['PropDeploymentConfigTimeBasedCanary', dict] = attr.ib(
default=None,
converter=PropDeploymentConfigTimeBasedCanary.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(PropDeploymentConfigTimeBasedCanary)),
metadata={AttrMeta.PROPERTY_NAME: "TimeBasedCanary"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentconfig-trafficroutingconfig.html#cfn-properties-codedeploy-deploymentconfig-trafficroutingconfig-timebasedcanary"""
p_TimeBasedLinear: typing.Union['PropDeploymentConfigTimeBasedLinear', dict] = attr.ib(
default=None,
converter=PropDeploymentConfigTimeBasedLinear.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(PropDeploymentConfigTimeBasedLinear)),
metadata={AttrMeta.PROPERTY_NAME: "TimeBasedLinear"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentconfig-trafficroutingconfig.html#cfn-properties-codedeploy-deploymentconfig-trafficroutingconfig-timebasedlinear"""
@attr.s
class PropDeploymentGroupDeployment(Property):
"""
AWS Object Type = "AWS::CodeDeploy::DeploymentGroup.Deployment"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deployment.html
Property Document:
- ``rp_Revision``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deployment.html#cfn-properties-codedeploy-deploymentgroup-deployment-revision
- ``p_Description``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deployment.html#cfn-properties-codedeploy-deploymentgroup-deployment-description
- ``p_IgnoreApplicationStopFailures``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deployment.html#cfn-properties-codedeploy-deploymentgroup-deployment-ignoreapplicationstopfailures
"""
AWS_OBJECT_TYPE = "AWS::CodeDeploy::DeploymentGroup.Deployment"
rp_Revision: typing.Union['PropDeploymentGroupRevisionLocation', dict] = attr.ib(
default=None,
converter=PropDeploymentGroupRevisionLocation.from_dict,
validator=attr.validators.instance_of(PropDeploymentGroupRevisionLocation),
metadata={AttrMeta.PROPERTY_NAME: "Revision"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deployment.html#cfn-properties-codedeploy-deploymentgroup-deployment-revision"""
p_Description: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Description"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deployment.html#cfn-properties-codedeploy-deploymentgroup-deployment-description"""
p_IgnoreApplicationStopFailures: bool = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(bool)),
metadata={AttrMeta.PROPERTY_NAME: "IgnoreApplicationStopFailures"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deployment.html#cfn-properties-codedeploy-deploymentgroup-deployment-ignoreapplicationstopfailures"""
#--- Resource declaration ---
@attr.s
class DeploymentConfig(Resource):
"""
AWS Object Type = "AWS::CodeDeploy::DeploymentConfig"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentconfig.html
Property Document:
- ``p_ComputePlatform``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentconfig.html#cfn-codedeploy-deploymentconfig-computeplatform
- ``p_DeploymentConfigName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentconfig.html#cfn-codedeploy-deploymentconfig-deploymentconfigname
- ``p_MinimumHealthyHosts``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentconfig.html#cfn-codedeploy-deploymentconfig-minimumhealthyhosts
- ``p_TrafficRoutingConfig``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentconfig.html#cfn-codedeploy-deploymentconfig-trafficroutingconfig
"""
AWS_OBJECT_TYPE = "AWS::CodeDeploy::DeploymentConfig"
p_ComputePlatform: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "ComputePlatform"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentconfig.html#cfn-codedeploy-deploymentconfig-computeplatform"""
p_DeploymentConfigName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "DeploymentConfigName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentconfig.html#cfn-codedeploy-deploymentconfig-deploymentconfigname"""
p_MinimumHealthyHosts: typing.Union['PropDeploymentConfigMinimumHealthyHosts', dict] = attr.ib(
default=None,
converter=PropDeploymentConfigMinimumHealthyHosts.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(PropDeploymentConfigMinimumHealthyHosts)),
metadata={AttrMeta.PROPERTY_NAME: "MinimumHealthyHosts"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentconfig.html#cfn-codedeploy-deploymentconfig-minimumhealthyhosts"""
p_TrafficRoutingConfig: typing.Union['PropDeploymentConfigTrafficRoutingConfig', dict] = attr.ib(
default=None,
converter=PropDeploymentConfigTrafficRoutingConfig.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(PropDeploymentConfigTrafficRoutingConfig)),
metadata={AttrMeta.PROPERTY_NAME: "TrafficRoutingConfig"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentconfig.html#cfn-codedeploy-deploymentconfig-trafficroutingconfig"""
@attr.s
class Application(Resource):
"""
AWS Object Type = "AWS::CodeDeploy::Application"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-application.html
Property Document:
- ``p_ApplicationName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-application.html#cfn-codedeploy-application-applicationname
- ``p_ComputePlatform``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-application.html#cfn-codedeploy-application-computeplatform
- ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-application.html#cfn-codedeploy-application-tags
"""
AWS_OBJECT_TYPE = "AWS::CodeDeploy::Application"
p_ApplicationName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "ApplicationName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-application.html#cfn-codedeploy-application-applicationname"""
p_ComputePlatform: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "ComputePlatform"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-application.html#cfn-codedeploy-application-computeplatform"""
p_Tags: typing.List[typing.Union[Tag, dict]] = attr.ib(
default=None,
converter=Tag.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(Tag), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "Tags"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-application.html#cfn-codedeploy-application-tags"""
@attr.s
class DeploymentGroup(Resource):
"""
AWS Object Type = "AWS::CodeDeploy::DeploymentGroup"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentgroup.html
Property Document:
- ``rp_ApplicationName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentgroup.html#cfn-codedeploy-deploymentgroup-applicationname
- ``rp_ServiceRoleArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentgroup.html#cfn-codedeploy-deploymentgroup-servicerolearn
- ``p_AlarmConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentgroup.html#cfn-codedeploy-deploymentgroup-alarmconfiguration
- ``p_AutoRollbackConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentgroup.html#cfn-codedeploy-deploymentgroup-autorollbackconfiguration
- ``p_AutoScalingGroups``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentgroup.html#cfn-codedeploy-deploymentgroup-autoscalinggroups
- ``p_BlueGreenDeploymentConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentgroup.html#cfn-codedeploy-deploymentgroup-bluegreendeploymentconfiguration
- ``p_Deployment``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentgroup.html#cfn-codedeploy-deploymentgroup-deployment
- ``p_DeploymentConfigName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentgroup.html#cfn-codedeploy-deploymentgroup-deploymentconfigname
- ``p_DeploymentGroupName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentgroup.html#cfn-codedeploy-deploymentgroup-deploymentgroupname
- ``p_DeploymentStyle``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentgroup.html#cfn-codedeploy-deploymentgroup-deploymentstyle
- ``p_ECSServices``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentgroup.html#cfn-codedeploy-deploymentgroup-ecsservices
- ``p_Ec2TagFilters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentgroup.html#cfn-codedeploy-deploymentgroup-ec2tagfilters
- ``p_Ec2TagSet``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentgroup.html#cfn-codedeploy-deploymentgroup-ec2tagset
- ``p_LoadBalancerInfo``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentgroup.html#cfn-codedeploy-deploymentgroup-loadbalancerinfo
- ``p_OnPremisesInstanceTagFilters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentgroup.html#cfn-codedeploy-deploymentgroup-onpremisesinstancetagfilters
- ``p_OnPremisesTagSet``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentgroup.html#cfn-codedeploy-deploymentgroup-onpremisestagset
- ``p_TriggerConfigurations``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentgroup.html#cfn-codedeploy-deploymentgroup-triggerconfigurations
"""
AWS_OBJECT_TYPE = "AWS::CodeDeploy::DeploymentGroup"
rp_ApplicationName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "ApplicationName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentgroup.html#cfn-codedeploy-deploymentgroup-applicationname"""
rp_ServiceRoleArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "ServiceRoleArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentgroup.html#cfn-codedeploy-deploymentgroup-servicerolearn"""
p_AlarmConfiguration: typing.Union['PropDeploymentGroupAlarmConfiguration', dict] = attr.ib(
default=None,
converter=PropDeploymentGroupAlarmConfiguration.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(PropDeploymentGroupAlarmConfiguration)),
metadata={AttrMeta.PROPERTY_NAME: "AlarmConfiguration"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentgroup.html#cfn-codedeploy-deploymentgroup-alarmconfiguration"""
p_AutoRollbackConfiguration: typing.Union['PropDeploymentGroupAutoRollbackConfiguration', dict] = attr.ib(
default=None,
converter=PropDeploymentGroupAutoRollbackConfiguration.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(PropDeploymentGroupAutoRollbackConfiguration)),
metadata={AttrMeta.PROPERTY_NAME: "AutoRollbackConfiguration"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentgroup.html#cfn-codedeploy-deploymentgroup-autorollbackconfiguration"""
p_AutoScalingGroups: typing.List[TypeHint.intrinsic_str] = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "AutoScalingGroups"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentgroup.html#cfn-codedeploy-deploymentgroup-autoscalinggroups"""
p_BlueGreenDeploymentConfiguration: typing.Union['PropDeploymentGroupBlueGreenDeploymentConfiguration', dict] = attr.ib(
default=None,
converter=PropDeploymentGroupBlueGreenDeploymentConfiguration.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(PropDeploymentGroupBlueGreenDeploymentConfiguration)),
metadata={AttrMeta.PROPERTY_NAME: "BlueGreenDeploymentConfiguration"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentgroup.html#cfn-codedeploy-deploymentgroup-bluegreendeploymentconfiguration"""
p_Deployment: typing.Union['PropDeploymentGroupDeployment', dict] = attr.ib(
default=None,
converter=PropDeploymentGroupDeployment.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(PropDeploymentGroupDeployment)),
metadata={AttrMeta.PROPERTY_NAME: "Deployment"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentgroup.html#cfn-codedeploy-deploymentgroup-deployment"""
p_DeploymentConfigName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "DeploymentConfigName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentgroup.html#cfn-codedeploy-deploymentgroup-deploymentconfigname"""
p_DeploymentGroupName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "DeploymentGroupName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentgroup.html#cfn-codedeploy-deploymentgroup-deploymentgroupname"""
p_DeploymentStyle: typing.Union['PropDeploymentGroupDeploymentStyle', dict] = attr.ib(
default=None,
converter=PropDeploymentGroupDeploymentStyle.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(PropDeploymentGroupDeploymentStyle)),
metadata={AttrMeta.PROPERTY_NAME: "DeploymentStyle"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentgroup.html#cfn-codedeploy-deploymentgroup-deploymentstyle"""
p_ECSServices: typing.List[typing.Union['PropDeploymentGroupECSService', dict]] = attr.ib(
default=None,
converter=PropDeploymentGroupECSService.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropDeploymentGroupECSService), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "ECSServices"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentgroup.html#cfn-codedeploy-deploymentgroup-ecsservices"""
p_Ec2TagFilters: typing.List[typing.Union['PropDeploymentGroupEC2TagFilter', dict]] = attr.ib(
default=None,
converter=PropDeploymentGroupEC2TagFilter.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropDeploymentGroupEC2TagFilter), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "Ec2TagFilters"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentgroup.html#cfn-codedeploy-deploymentgroup-ec2tagfilters"""
p_Ec2TagSet: typing.Union['PropDeploymentGroupEC2TagSet', dict] = attr.ib(
default=None,
converter=PropDeploymentGroupEC2TagSet.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(PropDeploymentGroupEC2TagSet)),
metadata={AttrMeta.PROPERTY_NAME: "Ec2TagSet"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentgroup.html#cfn-codedeploy-deploymentgroup-ec2tagset"""
p_LoadBalancerInfo: typing.Union['PropDeploymentGroupLoadBalancerInfo', dict] = attr.ib(
default=None,
converter=PropDeploymentGroupLoadBalancerInfo.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(PropDeploymentGroupLoadBalancerInfo)),
metadata={AttrMeta.PROPERTY_NAME: "LoadBalancerInfo"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentgroup.html#cfn-codedeploy-deploymentgroup-loadbalancerinfo"""
p_OnPremisesInstanceTagFilters: typing.List[typing.Union['PropDeploymentGroupTagFilter', dict]] = attr.ib(
default=None,
converter=PropDeploymentGroupTagFilter.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropDeploymentGroupTagFilter), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "OnPremisesInstanceTagFilters"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentgroup.html#cfn-codedeploy-deploymentgroup-onpremisesinstancetagfilters"""
p_OnPremisesTagSet: typing.Union['PropDeploymentGroupOnPremisesTagSet', dict] = attr.ib(
default=None,
converter=PropDeploymentGroupOnPremisesTagSet.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(PropDeploymentGroupOnPremisesTagSet)),
metadata={AttrMeta.PROPERTY_NAME: "OnPremisesTagSet"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentgroup.html#cfn-codedeploy-deploymentgroup-onpremisestagset"""
p_TriggerConfigurations: typing.List[typing.Union['PropDeploymentGroupTriggerConfig', dict]] = attr.ib(
default=None,
converter=PropDeploymentGroupTriggerConfig.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropDeploymentGroupTriggerConfig), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "TriggerConfigurations"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentgroup.html#cfn-codedeploy-deploymentgroup-triggerconfigurations"""
#--- Source file: middleware/url.py | repo: openspending/cosmopolitan | license: MIT ---
class LowercaseMiddleware(object):
    """Old-style Django middleware that lowercases every request path."""
    def process_request(self, request):
        # Rewrite the path in place so URL routing becomes case-insensitive.
        request.path_info = request.path_info.lower()
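# Usage sketch (hedged): the process_request hook marks this as a
# pre-Django-1.10 middleware, so it would be registered through the legacy
# MIDDLEWARE_CLASSES setting; the dotted path below assumes the module is
# importable as middleware.url.
# MIDDLEWARE_CLASSES = [
#     'middleware.url.LowercaseMiddleware',
#     # ... the rest of the stack ...
# ]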
#--- Source file: code/tests/test_deck.py | repo: kiranajij/29Cards | license: MIT ---
from unittest import TestCase
from code.backend.game import deck
class TestDeck(TestCase):
    """Placeholder suite for the deck module; no behavior is asserted yet."""
    pass
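# Illustrative next step (hypothetical: assumes deck exposes a Deck class with
# a `cards` collection; the real API of code.backend.game.deck is not shown
# here). Twenty-Nine is played with a 32-card deck, so a first test might be:
#     def test_new_deck_has_32_cards(self):
#         self.assertEqual(len(deck.Deck().cards), 32)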
#--- Source file: cloudcafe/images/v1/client.py | repo: ProjectMeniscus/cloudcafe | license: Apache-2.0 ---
from cafe.engine.clients.rest import AutoMarshallingRestClient
from cloudcafe.images.v1.models.image import Image, ImageMinList
from cloudcafe.images.v1.models.member import MemberList
class ImagesClient(AutoMarshallingRestClient):
"""Client for Image API."""
def __init__(self, url, auth_token, serialize_format, deserialize_format):
"""
@param url: Base URL for the compute service
@type url: String
@param auth_token: Auth token to be used for all requests
@type auth_token: String
@param serialize_format: Format for serializing requests
@type serialize_format: String
@param deserialize_format: Format for de-serializing responses
@type deserialize_format: String
"""
super(ImagesClient, self).__init__(
serialize_format,
deserialize_format)
self.auth_token = auth_token
self.default_headers['X-Auth-Token'] = auth_token
self.default_headers['Content-Type'] = 'application/{0}'.format(
self.serialize_format)
self.default_headers['Accept'] = 'application/{0}'.format(
self.deserialize_format)
self.url = url
def list_images(self, requestslib_kwargs=None):
url = '{0}/images'.format(self.url)
return self.request('GET', url, response_entity_type=Image,
requestslib_kwargs=requestslib_kwargs)
def list_images_detail(self, parameters_list, requestslib_kwargs=None):
url = '{0}/images/detail'.format(self.url)
return self.request('GET', url, params=parameters_list,
response_entity_type=Image,
requestslib_kwargs=requestslib_kwargs)
def get_image(self, image_id, requestslib_kwargs=None):
url = '{0}/images/{1}'.format(self.url, image_id)
return self.request('GET', url, response_entity_type=Image,
requestslib_kwargs=requestslib_kwargs)
def delete_image(self, image_id, requestslib_kwargs=None):
url = '{0}/images/{1}'.format(self.url, image_id)
return self.request('DELETE', url, response_entity_type=Image,
requestslib_kwargs=requestslib_kwargs)
def filter_images_list(self, parameters_list, requestslib_kwargs=None):
url = '{0}/images'.format(self.url)
return self.request('GET', url, params=parameters_list,
response_entity_type=Image,
requestslib_kwargs=requestslib_kwargs)
def retrieve_metadata(self, image_id, requestslib_kwargs=None):
url = '{0}/images/{1}'.format(self.url, image_id)
return self.request('HEAD',
url,
requestslib_kwargs=requestslib_kwargs)
def retrieve_raw_image_data(self, image_id, requestslib_kwargs=None):
url = '{0}/images/{1}'.format(self.url, image_id)
return self.request('GET', url, requestslib_kwargs=requestslib_kwargs)
def add_image(self, image_name, image_data=None, headers=None,
image_meta_id=None, image_meta_store=None,
image_meta_disk_format=None,
image_meta_container_format=None, image_meta_size=None,
image_meta_checksum=None, image_meta_is_public=None,
image_meta_min_ram=None, image_meta_min_disk=None,
image_meta_owner=None, image_meta_property=None,
image_meta_location=None,
requestslib_kwargs=None):
headers = headers if headers else {}
if image_data:
headers['Content-Type'] = 'application/octet-stream'
headers['x-image-meta-name'] = image_name
headers['x-image-meta-id'] = image_meta_id
headers['x-image-meta-store'] = image_meta_store
headers['x-image-meta-disk-format'] = image_meta_disk_format
headers['x-image-meta-container-format'] = image_meta_container_format
headers['x-image-meta-size'] = image_meta_size
headers['x-image-meta-checksum'] = image_meta_checksum
headers['x-image-meta-is-public'] = image_meta_is_public
headers['x-image-meta-min-ram'] = image_meta_min_ram
headers['x-image-meta-min-disk'] = image_meta_min_disk
headers['x-image-meta-owner'] = image_meta_owner
headers['x-image-meta-location'] = image_meta_location
if image_meta_property:
for key, val in image_meta_property.items():
headers['x-image-meta-property-{0}'.format(key)] = val
url = '{0}/images'.format(self.url)
return self.request('POST', url, headers=headers, data=image_data,
response_entity_type=Image,
requestslib_kwargs=requestslib_kwargs)
def list_image_membership(self, image_id, requestslib_kwargs=None):
url = '{0}/images/{1}/members'.format(self.url, image_id)
return self.request('GET', url,
response_entity_type=MemberList,
requestslib_kwargs=requestslib_kwargs)
def update_image(self, image_id, image_data=None, headers=None,
image_meta_name=None, image_meta_store=None,
image_meta_disk_format=None,
image_meta_container_format=None, image_meta_size=None,
image_meta_checksum=None, image_meta_is_public=None,
image_meta_min_ram=None, image_meta_min_disk=None,
image_meta_owner=None, image_meta_property=None,
image_meta_location=None,
requestslib_kwargs=None):
headers = headers if headers else {}
if image_data:
headers['Content-Type'] = 'application/octet-stream'
headers['x-image-meta-name'] = image_meta_name
headers['x-image-meta-store'] = image_meta_store
headers['x-image-meta-disk-format'] = image_meta_disk_format
headers['x-image-meta-container-format'] = image_meta_container_format
headers['x-image-meta-size'] = image_meta_size
headers['x-image-meta-checksum'] = image_meta_checksum
headers['x-image-meta-is-public'] = image_meta_is_public
headers['x-image-meta-min-ram'] = image_meta_min_ram
headers['x-image-meta-min-disk'] = image_meta_min_disk
headers['x-image-meta-owner'] = image_meta_owner
headers['x-image-meta-location'] = image_meta_location
if image_meta_property:
for key, val in image_meta_property.items():
headers['x-image-meta-property-{0}'.format(key)] = val
url = '{0}/images/{1}'.format(self.url, image_id)
return self.request('PUT', url, headers=headers,
data=image_data,
response_entity_type=Image,
requestslib_kwargs=requestslib_kwargs)
def list_shared_images(self, member_id, requestslib_kwargs=None):
url = '{0}/shared-images/{1}'.format(self.url, member_id)
return self.request('GET', url, response_entity_type=ImageMinList,
requestslib_kwargs=requestslib_kwargs)
def add_member_to_image(self, image_id, member_id,
requestslib_kwargs=None):
url = '{0}/images/{1}/members/{2}'.format(self.url, image_id,
member_id)
return self.request('PUT', url, requestslib_kwargs=requestslib_kwargs)
def delete_member_from_image(self, image_id, member_id,
requestslib_kwargs=None):
url = '{0}/images/{1}/members/{2}'.format(self.url, image_id,
member_id)
return self.request('DELETE', url,
requestslib_kwargs=requestslib_kwargs)
def replace_members_list(self, image_id, requestslib_kwargs=None):
url = '{0}/images/{1}/members'.format(self.url, image_id)
return self.request('PUT', url, requestslib_kwargs=requestslib_kwargs)
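# Usage sketch (hedged: the endpoint URL and token are placeholders, and
# 'json' is assumed to be an accepted serialize/deserialize format for
# AutoMarshallingRestClient).
# client = ImagesClient(
#     url="http://glance.example.com:9292/v1",
#     auth_token="<auth-token>",
#     serialize_format="json",
#     deserialize_format="json",
# )
# resp = client.filter_images_list(parameters_list={"status": "active"})
# image = resp.entity  # marshalled per response_entity_type=Image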
#--- Source file: hello-fortran/tests/test.py | repo: Nicholaswogan/skbuild-f2py-examples | license: MIT ---
import hello
# Call the f2py-generated wrapper around the Fortran `hello` subroutine.
hello.hello()
#--- Source file: pycausalmatch/__init__.py | repo: bartdegroot92/pycausalmatch | license: MIT ---
# Package root: re-export the version string and the main entry point.
from pycausalmatch.__version__ import __version__
from pycausalmatch.main import R_MarketMatching
#--- Source file: applications/Rekall/controllers/audit.py | repo: scudette/rekall-agent-server | license: BSD-3-Clause ---
def search():
    """web2py controller action: pass the `q` query parameter through to the
    search view (by web2py convention, views/audit/search.html)."""
    return dict(q=request.vars.q or "")
#--- Source file: neuroio/notifications/v1.py | repo: neuroio/neuroio-python | license: MIT ---
from typing import List, Union
from httpx import Response
from neuroio.base import APIBase, APIBaseAsync, APIBaseBase
from neuroio.constants import (
EntryLiveness,
EntryMood,
EntryResult,
HttpMethod,
Sex,
sentinel,
)
from neuroio.utils import request_dict_processing, request_query_processing
class NotificationsBase(APIBaseBase):
def get_url(self, key: str = None) -> str:
if key:
return self.base_url + f"/v1/notifications/{key}/"
else:
return self.base_url + "/v1/notifications/"
class Impl(APIBase, NotificationsBase):
def create(
self,
name: str,
http_method: HttpMethod,
destination_url: str,
is_active: bool = True,
moods: Union[List[EntryMood], object] = sentinel,
results: Union[List[EntryResult], object] = sentinel,
liveness: Union[List[EntryLiveness], object] = sentinel,
age_from: Union[int, object] = sentinel,
age_to: Union[int, object] = sentinel,
sex: Union[List[Sex], object] = sentinel,
sources: Union[List[int], object] = sentinel,
persons_groups: Union[List[int], object] = sentinel,
) -> Response:
data = request_dict_processing(locals(), ["self"])
with self.get_client() as client:
return client.post(url=self.get_url(), json=data)
def list(
self,
q: Union[str, object] = sentinel,
spaces_ids: Union[List[int], object] = sentinel,
limit: int = 20,
offset: int = 0,
) -> Response:
data = request_query_processing(locals(), ["self"])
with self.get_client() as client:
return client.get(url=self.get_url(), params=data)
def get(self, id: int) -> Response:
with self.get_client() as client:
return client.get(url=self.get_url(f"{id}"))
def update(
self,
id: int,
name: str,
http_method: HttpMethod,
destination_url: str,
is_active: bool = True,
moods: Union[List[EntryMood], object] = sentinel,
results: Union[List[EntryResult], object] = sentinel,
liveness: Union[List[EntryLiveness], object] = sentinel,
age_from: Union[int, object] = sentinel,
age_to: Union[int, object] = sentinel,
sex: Union[List[Sex], object] = sentinel,
sources: Union[List[int], object] = sentinel,
persons_groups: Union[List[int], object] = sentinel,
) -> Response:
data = request_dict_processing(locals(), ["self", "id"])
with self.get_client() as client:
return client.patch(url=self.get_url(f"{id}"), json=data)
def delete(self, id: int) -> Response:
with self.get_client() as client:
return client.delete(url=self.get_url(f"{id}"))
class ImplAsync(APIBaseAsync, NotificationsBase):
async def create(
self,
name: str,
http_method: HttpMethod,
destination_url: str,
is_active: bool = True,
moods: Union[List[EntryMood], object] = sentinel,
results: Union[List[EntryResult], object] = sentinel,
liveness: Union[List[EntryLiveness], object] = sentinel,
age_from: Union[int, object] = sentinel,
age_to: Union[int, object] = sentinel,
sex: Union[List[Sex], object] = sentinel,
sources: Union[List[int], object] = sentinel,
persons_groups: Union[List[int], object] = sentinel,
) -> Response:
data = request_dict_processing(locals(), ["self"])
async with self.get_client() as client:
return await client.post(url=self.get_url(), json=data)
async def list(
self,
q: Union[str, object] = sentinel,
spaces_ids: Union[List[int], object] = sentinel,
limit: int = 20,
offset: int = 0,
) -> Response:
data = request_query_processing(locals(), ["self"])
async with self.get_client() as client:
return await client.get(url=self.get_url(), params=data)
async def get(self, id: int) -> Response:
async with self.get_client() as client:
return await client.get(url=self.get_url(f"{id}"))
async def update(
self,
id: int,
name: str,
http_method: HttpMethod,
destination_url: str,
is_active: bool = True,
moods: Union[List[EntryMood], object] = sentinel,
results: Union[List[EntryResult], object] = sentinel,
liveness: Union[List[EntryLiveness], object] = sentinel,
age_from: Union[int, object] = sentinel,
age_to: Union[int, object] = sentinel,
sex: Union[List[Sex], object] = sentinel,
sources: Union[List[int], object] = sentinel,
persons_groups: Union[List[int], object] = sentinel,
) -> Response:
data = request_dict_processing(locals(), ["self", "id"])
async with self.get_client() as client:
return await client.patch(url=self.get_url(f"{id}"), json=data)
async def delete(self, id: int) -> Response:
async with self.get_client() as client:
return await client.delete(url=self.get_url(f"{id}"))
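The sentinel defaults above let callers distinguish "argument omitted" from an explicit None, and helpers like request_dict_processing(locals(), ["self"]) drop the unset arguments before serialization. A self-contained sketch of that filtering pattern (filter_sentinels is an illustrative stand-in, not neuroio's actual implementation):

# Sketch of the sentinel-filtering pattern used above; filter_sentinels
# is a stand-in for a helper like request_dict_processing.
sentinel = object()  # unique marker meaning "argument not supplied"

def filter_sentinels(params: dict, exclude: list) -> dict:
    """Drop excluded keys and any argument still at its sentinel default."""
    return {k: v for k, v in params.items()
            if k not in exclude and v is not sentinel}

def create(name: str, is_active: bool = True, moods=sentinel) -> dict:
    # locals() captures every argument; the helper strips the unset ones.
    return filter_sentinels(locals(), exclude=[])

assert create("alert") == {"name": "alert", "is_active": True}
assert create("alert", moods=["HAPPY"])["moods"] == ["HAPPY"]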
| 35.087838
| 75
| 0.609667
| 611
| 5,193
| 5.075286
| 0.13257
| 0.162528
| 0.098678
| 0.058046
| 0.844889
| 0.844889
| 0.835214
| 0.835214
| 0.78652
| 0.78652
| 0
| 0.002097
| 0.265357
| 5,193
| 147
| 76
| 35.326531
| 0.810747
| 0
| 0
| 0.68254
| 0
| 0
| 0.018101
| 0.004622
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047619
| false
| 0
| 0.039683
| 0
| 0.206349
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
6ffb75457a05ea3b38ff85ad8dce47521c6f146f
| 142
|
py
|
Python
|
final_race/ErrorMsg.py
|
clayshubert/AutonomousVehicles
|
72709885989b2b4266c86d6e5e0a0609b3f4a959
|
[
"MIT"
] | null | null | null |
final_race/ErrorMsg.py
|
clayshubert/AutonomousVehicles
|
72709885989b2b4266c86d6e5e0a0609b3f4a959
|
[
"MIT"
] | null | null | null |
final_race/ErrorMsg.py
|
clayshubert/AutonomousVehicles
|
72709885989b2b4266c86d6e5e0a0609b3f4a959
|
[
"MIT"
] | null | null | null |
class ErrorMsg:
    # ROS-style message fields, restored as Python type annotations
    X_One: float
    Height_Of_Cone: float
    Area_Of_View_Top: float
    Area_Of_View_Bottom: float
    Stop_Now: bool
| 17.75
| 30
| 0.788732
| 23
| 142
| 4.434783
| 0.695652
| 0.215686
| 0.254902
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.067797
| 0.169014
| 142
| 7
| 31
| 20.285714
| 0.79661
| 0.697183
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
82ea6726551a7e4421f675a5186c3f0eb92f7021
| 1,085
|
py
|
Python
|
page/page.py
|
silviacui/BaiNianShop
|
45320ea1e0a458961d202259b8eb5a2d82334b5f
|
[
"MIT"
] | null | null | null |
page/page.py
|
silviacui/BaiNianShop
|
45320ea1e0a458961d202259b8eb5a2d82334b5f
|
[
"MIT"
] | null | null | null |
page/page.py
|
silviacui/BaiNianShop
|
45320ea1e0a458961d202259b8eb5a2d82334b5f
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from .home_page import *
from .mine_page import *
from .login_page import *
from .settings_page import *
from .about_bainian_page import *
from .address_page import *
from .add_address_page import *
from .cities_page import *
from .change_password_page import *
class Page():
def __init__(self, driver):
self.driver = driver
@property
def home(self):
return HomePage(self.driver)
@property
def mine(self):
return MinePage(self.driver)
@property
def login(self):
return LoginPage(self.driver)
@property
def settings(self):
return SettingsPage(self.driver)
@property
def about_bainian(self):
return AboutBainianPage(self.driver)
@property
def address(self):
return AddressPage(self.driver)
@property
def add_address(self):
return AddAddressPage(self.driver)
@property
def cities(self):
return CitiesPage(self.driver)
@property
def change_password(self):
return ChangePasswordPage(self.driver)
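Each @property above builds a fresh page object on every access, so tests always wrap the current driver state rather than a stale one. A self-contained sketch of that facade pattern (FakeDriver and HomePage here are dummies, not the project's real classes):

# Self-contained sketch of the lazy page-object facade above.
class FakeDriver:
    pass

class HomePage:
    def __init__(self, driver):
        self.driver = driver

class Page:
    def __init__(self, driver):
        self.driver = driver

    @property
    def home(self):
        # A new page object per access, always wrapping the live driver.
        return HomePage(self.driver)

page = Page(FakeDriver())
assert isinstance(page.home, HomePage)
assert page.home is not page.home  # fresh object on every access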
| 19.727273
| 46
| 0.665438
| 126
| 1,085
| 5.579365
| 0.261905
| 0.156472
| 0.217639
| 0.238976
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.001218
| 0.243318
| 1,085
| 54
| 47
| 20.092593
| 0.855055
| 0.019355
| 0
| 0.230769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25641
| false
| 0.076923
| 0.230769
| 0.230769
| 0.74359
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 6
|
82ffab19febe2fbe186ed9ce8d0cd9efd06a14f8
| 8,125
|
py
|
Python
|
authors/tests/data/test_like_dislike_comments.py
|
andela/ah-the-jedi-backend
|
ba429dfcec577bd6d52052673c1c413835f65988
|
[
"BSD-3-Clause"
] | 1
|
2019-12-25T18:59:34.000Z
|
2019-12-25T18:59:34.000Z
|
authors/tests/data/test_like_dislike_comments.py
|
katherine95/ah-the-jedi-backend
|
ba429dfcec577bd6d52052673c1c413835f65988
|
[
"BSD-3-Clause"
] | 26
|
2019-04-23T11:20:35.000Z
|
2022-03-11T23:45:54.000Z
|
authors/tests/data/test_like_dislike_comments.py
|
katherine95/ah-the-jedi-backend
|
ba429dfcec577bd6d52052673c1c413835f65988
|
[
"BSD-3-Clause"
] | 8
|
2019-05-21T06:54:34.000Z
|
2019-11-18T19:45:22.000Z
|
import json
import os
import jwt
from authors.apps.authentication.models import User
from rest_framework import status
from .base_test import BaseTest
from authors.settings import SECRET_KEY
from django.urls import reverse
from rest_framework.test import APIClient
from .data import Data
class LikeDislikeTestCase(BaseTest):
"""
This class defines the test suite for like and dislike
cases.
"""
def setUp(self):
""" Define the test client and required test variables. """
# self.client = APIClient()
# self.base_data = Data()
BaseTest.setUp(self)
signup = self.signup_user()
uid = signup.data.get('data')['id']
token = signup.data.get('data')['token']
self.activate_user(uid=uid, token=token)
login = self.login_user()
self.token = login.data['token']
def test_a_user_can_like_a_comment(self):
"""
Test an authenticated user can successfully like a comment
"""
article = self.create_article()
slug = article.data['data']['slug']
comment = self.create_comment(slug)
comment_id = comment.data.get('id')
like = self.client.post('/api/articles/{}/comments/{}/like/'.format(slug, comment_id),
HTTP_AUTHORIZATION='Bearer ' +
self.token,
format='json')
self.assertEqual(like.status_code, 200)
def test_an_unauthenticated_user_cannot_like_a_comment(self):
"""
Test an unauthenticated user cannot like a comment
"""
article = self.create_article()
slug = article.data['data']['slug']
comment = self.create_comment(slug)
comment_id = comment.data.get('id')
like = self.client.post('/api/articles/{}/comments/{}/like/'.format(slug, comment_id),
format='json')
self.assertEqual(
like.data['detail'], "Authentication credentials were not provided.")
self.assertEqual(like.status_code, 401)
    def test_cannot_like_comment_with_non_existent_slug(self):
"""
        Test a user cannot like with a nonexistent article slug
"""
article = self.create_article()
slug = article.data['data']['slug']
comment = self.create_comment(slug)
comment_id = comment.data.get('id')
like = self.client.post('/api/articles/{}/comments/{}/like/'.format("abc", comment_id),
HTTP_AUTHORIZATION='Bearer ' +
self.token,
format='json')
like = json.loads(like.content.decode('utf-8'))
self.assertEqual(like['error'], 'Article with slug abc not found')
self.assertEqual(int(like['status']), 404)
    def test_cannot_like_comment_with_non_existent_comment_id(self):
"""
        Test a user cannot like with a nonexistent article comment id
"""
article = self.create_article()
slug = article.data['data']['slug']
comment = self.create_comment(slug)
comment_id = comment.data.get('id')
like = self.client.post('/api/articles/{}/comments/{}/like/'.format(slug, 70000),
HTTP_AUTHORIZATION='Bearer ' +
self.token,
format='json')
like = json.loads(like.content.decode('utf-8'))
self.assertEqual(like['error'], 'Comment with id 70000 not found')
self.assertEqual(int(like['status']), 404)
def test_a_user_can_delete_like(self):
"""
Test an authenticated user can delete a like
"""
article = self.create_article()
slug = article.data['data']['slug']
comment = self.create_comment(slug)
comment_id = comment.data.get('id')
self.client.post('/api/articles/{}/comments/{}/like/'.format(slug, comment_id),
HTTP_AUTHORIZATION='Bearer ' +
self.token,
format='json')
like = self.client.post('/api/articles/{}/comments/{}/like/'.format(slug, comment_id),
HTTP_AUTHORIZATION='Bearer ' +
self.token,
format='json')
self.assertEqual(like.status_code, 200)
def test_a_user_can_dislike_a_comment(self):
"""
        Test an authenticated user can successfully dislike a comment
"""
article = self.create_article()
slug = article.data['data']['slug']
comment = self.create_comment(slug)
comment_id = comment.data.get('id')
dislike = self.client.post('/api/articles/{}/comments/{}/dislike/'.format(slug, comment_id),
HTTP_AUTHORIZATION='Bearer ' +
self.token,
format='json')
self.assertEqual(dislike.status_code, 200)
def test_an_unauthenticated_user_cannot_dislike_a_comment(self):
"""
Test an unauthenticated user cannot dislike a comment
"""
article = self.create_article()
slug = article.data['data']['slug']
comment = self.create_comment(slug)
comment_id = comment.data.get('id')
dislike = self.client.post('/api/articles/{}/comments/{}/dislike/'.format(slug, comment_id),
format='json')
self.assertEqual(
dislike.data['detail'], "Authentication credentials were not provided.")
self.assertEqual(dislike.status_code, 401)
    def test_cannot_dislike_comment_with_non_existent_slug(self):
"""
        Test a user cannot dislike with a nonexistent article slug
"""
article = self.create_article()
slug = article.data['data']['slug']
comment = self.create_comment(slug)
comment_id = comment.data.get('id')
dislike = self.client.post('/api/articles/{}/comments/{}/dislike/'.format("abc", comment_id),
HTTP_AUTHORIZATION='Bearer ' +
self.token,
format='json')
dislike = json.loads(dislike.content.decode('utf-8'))
self.assertEqual(dislike['error'], 'Article with slug abc not found')
self.assertEqual(int(dislike['status']), 404)
    def test_cannot_dislike_comment_with_non_existent_comment_id(self):
"""
        Test a user cannot dislike with a nonexistent article comment id
"""
article = self.create_article()
slug = article.data['data']['slug']
comment = self.create_comment(slug)
comment_id = comment.data.get('id')
dislike = self.client.post('/api/articles/{}/comments/{}/dislike/'.format(slug, 70000),
HTTP_AUTHORIZATION='Bearer ' +
self.token,
format='json')
dislike = json.loads(dislike.content.decode('utf-8'))
self.assertEqual(dislike['error'], 'Comment with id 70000 not found')
self.assertEqual(int(dislike['status']), 404)
def test_a_user_can_delete_dislike(self):
"""
Test an authenticated user can delete a dislike
"""
article = self.create_article()
slug = article.data['data']['slug']
comment = self.create_comment(slug)
comment_id = comment.data.get('id')
self.client.post('/api/articles/{}/comments/{}/dislike/'.format(slug, comment_id),
HTTP_AUTHORIZATION='Bearer ' +
self.token,
format='json')
dislike = self.client.post('/api/articles/{}/comments/{}/dislike/'.format(slug, comment_id),
HTTP_AUTHORIZATION='Bearer ' +
self.token,
format='json')
self.assertEqual(dislike.status_code, 200)
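The tests above repeat the same authenticated POST boilerplate for every like/dislike call; a small helper would remove that duplication. A sketch only, assuming BaseTest exposes self.client and self.token as the tests do (the react name is hypothetical):

# Hypothetical helper to collapse the repeated POST calls in the tests above.
def react(self, slug, comment_id, action="like", authenticated=True):
    """POST a like/dislike for a comment, optionally with the bearer token."""
    url = '/api/articles/{}/comments/{}/{}/'.format(slug, comment_id, action)
    extra = {'HTTP_AUTHORIZATION': 'Bearer ' + self.token} if authenticated else {}
    return self.client.post(url, format='json', **extra)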
| 38.507109
| 101
| 0.566277
| 865
| 8,125
| 5.168786
| 0.108671
| 0.068888
| 0.052337
| 0.045627
| 0.865578
| 0.855513
| 0.847238
| 0.847238
| 0.77656
| 0.711474
| 0
| 0.009683
| 0.3136
| 8,125
| 210
| 102
| 38.690476
| 0.792003
| 0.089231
| 0
| 0.681481
| 0
| 0
| 0.134397
| 0.059639
| 0
| 0
| 0
| 0
| 0.118519
| 1
| 0.081481
| false
| 0
| 0.074074
| 0
| 0.162963
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d21812bed17beb8bc384215d8f5dcafc4dd9b43a
| 56
|
py
|
Python
|
polya/__init__.py
|
johapark/polya
|
dce321dbfd01c0bf5b17585b358b1e87787f0725
|
[
"MIT"
] | null | null | null |
polya/__init__.py
|
johapark/polya
|
dce321dbfd01c0bf5b17585b358b1e87787f0725
|
[
"MIT"
] | null | null | null |
polya/__init__.py
|
johapark/polya
|
dce321dbfd01c0bf5b17585b358b1e87787f0725
|
[
"MIT"
] | null | null | null |
from . import io
from . import plot
from . import utils
| 14
| 19
| 0.732143
| 9
| 56
| 4.555556
| 0.555556
| 0.731707
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.214286
| 56
| 3
| 20
| 18.666667
| 0.931818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d21e71102cba89cac943f40875134a3dadb694f6
| 194
|
py
|
Python
|
sadmin/admin.py
|
Mustazur1234/Movement-Pass-Clone
|
c1d9805bdb3e0fe881b9631c594cacfe4009bd94
|
[
"MIT"
] | 29
|
2021-06-01T20:38:00.000Z
|
2021-09-14T05:02:07.000Z
|
sadmin/admin.py
|
Mustazur1234/Movement-Pass-Clone
|
c1d9805bdb3e0fe881b9631c594cacfe4009bd94
|
[
"MIT"
] | 1
|
2021-06-20T13:50:12.000Z
|
2021-06-20T14:43:08.000Z
|
sadmin/admin.py
|
Mustazur1234/Movement-Pass-Clone
|
c1d9805bdb3e0fe881b9631c594cacfe4009bd94
|
[
"MIT"
] | 25
|
2021-06-03T13:29:47.000Z
|
2021-08-31T16:04:43.000Z
|
from django.contrib import admin
from .models import IDtype, PassUser, Gender
# Register your models here.
admin.site.register(IDtype)
admin.site.register(PassUser)
admin.site.register(Gender)
| 24.25
| 44
| 0.809278
| 27
| 194
| 5.814815
| 0.481481
| 0.171975
| 0.324841
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097938
| 194
| 8
| 45
| 24.25
| 0.897143
| 0.134021
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.4
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 6
|
d233cc284396565da5b2a62cbb7a73d3482f2af9
| 150
|
py
|
Python
|
libs/yowsup/yowsup/yowsup/layers/protocol_groups/protocolentities/test_iq_groups.py
|
akshitpradhan/TomHack
|
837226e7b38de1140c19bc2d478eeb9e379ed1fd
|
[
"MIT"
] | 22
|
2017-07-14T20:01:17.000Z
|
2022-03-08T14:22:39.000Z
|
libs/yowsup/yowsup/yowsup/layers/protocol_groups/protocolentities/test_iq_groups.py
|
akshitpradhan/TomHack
|
837226e7b38de1140c19bc2d478eeb9e379ed1fd
|
[
"MIT"
] | 6
|
2017-07-14T21:03:50.000Z
|
2021-06-10T19:08:32.000Z
|
libs/yowsup/yowsup/yowsup/layers/protocol_groups/protocolentities/test_iq_groups.py
|
akshitpradhan/TomHack
|
837226e7b38de1140c19bc2d478eeb9e379ed1fd
|
[
"MIT"
] | 13
|
2017-07-14T20:13:14.000Z
|
2020-11-12T08:06:05.000Z
|
from yowsup.layers.protocol_iq.protocolentities.test_iq import IqProtocolEntityTest
class GroupsIqProtocolEntityTest(IqProtocolEntityTest):
pass
| 30
| 83
| 0.873333
| 14
| 150
| 9.214286
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08
| 150
| 4
| 84
| 37.5
| 0.934783
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
9643c9f61f234208217a38f2e625b746ceb8655a
| 29
|
py
|
Python
|
test/integration/ImportLocal/parent use.py
|
HighSchoolHacking/GLS-Draft
|
9e418b6290e7c8e3f2da87668784bdba1cde5a76
|
[
"MIT"
] | 30
|
2019-10-29T12:47:50.000Z
|
2022-02-12T06:41:39.000Z
|
test/integration/ImportLocal/parent use.py
|
HighSchoolHacking/GLS-Draft
|
9e418b6290e7c8e3f2da87668784bdba1cde5a76
|
[
"MIT"
] | 247
|
2017-09-21T17:11:18.000Z
|
2019-10-08T12:59:07.000Z
|
test/integration/ImportLocal/parent use.py
|
HighSchoolHacking/GLS-Draft
|
9e418b6290e7c8e3f2da87668784bdba1cde5a76
|
[
"MIT"
] | 17
|
2017-10-01T16:53:20.000Z
|
2018-11-28T07:20:35.000Z
|
#
from bbb import Eee, Fff
#
| 7.25
| 24
| 0.655172
| 5
| 29
| 3.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.241379
| 29
| 3
| 25
| 9.666667
| 0.863636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9683f9743abcf1ac0af85e3fcedb2969e8da61f7
| 92
|
py
|
Python
|
HelloWorld/Python01/_hello/kwlist.py
|
grtlinux/KieaPython
|
5539a9e3625864aa20624b8d684dbe5828dac3d1
|
[
"Apache-2.0"
] | 1
|
2022-01-29T08:32:11.000Z
|
2022-01-29T08:32:11.000Z
|
HelloWorld/Python01/_hello/kwlist.py
|
grtlinux/KieaPython
|
5539a9e3625864aa20624b8d684dbe5828dac3d1
|
[
"Apache-2.0"
] | null | null | null |
HelloWorld/Python01/_hello/kwlist.py
|
grtlinux/KieaPython
|
5539a9e3625864aa20624b8d684dbe5828dac3d1
|
[
"Apache-2.0"
] | 1
|
2022-01-29T08:32:12.000Z
|
2022-01-29T08:32:12.000Z
|
# file: kwlist.py
import keyword
print(keyword.kwlist)
print(len(keyword.kwlist))
| 10.222222
| 26
| 0.695652
| 12
| 92
| 5.333333
| 0.583333
| 0.40625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.228261
| 92
| 8
| 27
| 11.5
| 0.901408
| 0.163043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.333333
| null | null | 0.666667
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 6
|
96acda574ae6e1b98d7a43b2ea47ffcaf0bb41d4
| 201
|
py
|
Python
|
api/admin.py
|
Arlefreak/PlusProjectAPI
|
339f121ab9041a939c0fcb6287793519b0e38d5c
|
[
"MIT"
] | null | null | null |
api/admin.py
|
Arlefreak/PlusProjectAPI
|
339f121ab9041a939c0fcb6287793519b0e38d5c
|
[
"MIT"
] | null | null | null |
api/admin.py
|
Arlefreak/PlusProjectAPI
|
339f121ab9041a939c0fcb6287793519b0e38d5c
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from api.models import *
admin.site.register(Project)
admin.site.register(Payment)
admin.site.register(Task)
admin.site.register(Client)
# Register your models here.
| 20.1
| 32
| 0.800995
| 29
| 201
| 5.551724
| 0.517241
| 0.223602
| 0.42236
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.094527
| 201
| 9
| 33
| 22.333333
| 0.884615
| 0.129353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|