| Column | Type |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
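The example rows below follow this schema. As a quick, illustrative sketch (not part of the original data), this is one way rows with these columns could be loaded and filtered on the quality-signal columns; the Parquet file name and the use of pandas are assumptions, not something this table specifies.

```python
# Minimal sketch, assuming the rows are stored in a Parquet file named
# "data.parquet" (hypothetical) and that pandas with a Parquet engine is installed.
import pandas as pd

df = pd.read_parquet("data.parquet")  # hypothetical path

# Keep Python files that parse to an AST and are not dominated by duplicated 5-grams.
mask = (
    (df["lang"] == "Python")
    & (df["qsc_codepython_cate_ast_quality_signal"] == 1)
    & (df["qsc_code_frac_chars_dupe_5grams_quality_signal"] < 0.5)
)
print(df.loc[mask, ["max_stars_repo_name", "max_stars_repo_path", "size", "hits"]].head())
```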
hexsha: 5939ee11ff67ec65e4804c78a1e6684fc71b4f3b | size: 47 | ext: py | lang: Python
max_stars_repo: Helper/SystemHelper.py in vuthithuhuyen/milk-pasteurization-prediction @ 44e0e228f53d61c0a2fa1ed12c4803fd20ac3aad, licenses ["Apache-2.0"], max_stars_count 2, star events 2022-02-10T02:10:27.000Z to 2022-02-11T07:43:01.000Z
max_issues_repo: Helper/SystemHelper.py in vuthithuhuyen/milk-pasteurization-prediction @ 44e0e228f53d61c0a2fa1ed12c4803fd20ac3aad, licenses ["Apache-2.0"], max_issues_count null, issue events null to null
max_forks_repo: Helper/SystemHelper.py in vuthithuhuyen/milk-pasteurization-prediction @ 44e0e228f53d61c0a2fa1ed12c4803fd20ac3aad, licenses ["Apache-2.0"], max_forks_count 1, fork events 2022-02-10T02:10:29.000Z to 2022-02-10T02:10:29.000Z
content:
```python
import sys
def ExitProgram():
sys.exit(0)
```
avg_line_length: 9.4 | max_line_length: 18 | alphanum_fraction: 0.659574
qsc_*_quality_signal columns (qsc_code_num_words_quality_signal through qsc_codepython_frac_lines_print_quality_signal, in schema order): 7 | 47 | 4.428571 | 0.857143 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.027027 | 0.212766 | 47 | 5 | 19 | 9.4 | 0.810811 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0.333333 | 0 | 0.666667 | 0
qsc_* columns (qsc_code_num_words through qsc_codepython_frac_lines_print, in schema order): 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0
effective: 0 | hits: 7

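For orientation, the small file above makes it easy to see what the simpler surface statistics refer to. The sketch below recomputes a few comparable statistics from a code string; it is illustrative only, and the exact definitions behind the qsc_* columns may differ (for example, in how words, lines, and whitespace are counted).

```python
# Illustrative only: rough surface statistics over a code string, loosely mirroring
# columns such as qsc_code_num_chars_quality_signal, qsc_code_num_lines_quality_signal,
# and qsc_code_frac_chars_whitespace_quality_signal. Not the dataset's implementation.
def surface_stats(code: str) -> dict:
    words = code.split()
    nonempty_lines = [ln for ln in code.splitlines() if ln.strip()]
    n_chars = len(code)
    n_whitespace = sum(ch.isspace() for ch in code)
    return {
        "num_chars": n_chars,
        "num_lines": len(nonempty_lines),
        "num_words": len(words),
        "mean_word_length": sum(len(w) for w in words) / max(len(words), 1),
        "frac_words_unique": len(set(words)) / max(len(words), 1),
        "frac_chars_whitespace": n_whitespace / max(n_chars, 1),
    }

print(surface_stats("import sys\ndef ExitProgram():\n    sys.exit(0)\n"))
```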
hexsha: 59490c72d4e0b9edae2fcde2a34d5ffbcae1262f | size: 210 | ext: py | lang: Python
max_stars_repo: func/character.py in aronabot/arona_bot @ 22fee12a1c026c2788b6f3b8358bd01c592ae13f, licenses ["MIT"], max_stars_count 1, star events 2021-11-12T14:45:53.000Z to 2021-11-12T14:45:53.000Z
max_issues_repo: func/character.py in aronabot/arona_bot @ 22fee12a1c026c2788b6f3b8358bd01c592ae13f, licenses ["MIT"], max_issues_count null, issue events null to null
max_forks_repo: func/character.py in aronabot/arona_bot @ 22fee12a1c026c2788b6f3b8358bd01c592ae13f, licenses ["MIT"], max_forks_count 1, fork events 2021-03-17T15:45:05.000Z to 2021-03-17T15:45:05.000Z
content:
```python
from discord.ext import commands
import arona
class Character(commands.Cog):
def __init__(self, arona: arona.Arona):
pass
def setup(arona: arona.Arona):
arona.add_cog(Character(arona))
```
avg_line_length: 23.333333 | max_line_length: 44 | alphanum_fraction: 0.7
qsc_*_quality_signal columns (in schema order): 28 | 210 | 5.071429 | 0.535714 | 0.352113 | 0.316901 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.2 | 210 | 9 | 45 | 23.333333 | 0.845238 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.285714 | false | 0.142857 | 0.285714 | 0 | 0.714286 | 0
qsc_* columns (in schema order): 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1
effective: 0 | hits: 7

hexsha: 3cd0d51e07a7987e1a8a488badcbf767ba1c03b7 | size: 2,662 | ext: py | lang: Python
max_stars_repo: tests/dicts/test_update.py in skippyprime/configs @ f40bc01f1f00e32038e3aeeaa04dec600f9378f8, licenses ["Apache-2.0"], max_stars_count null, star events null to null
max_issues_repo: tests/dicts/test_update.py in skippyprime/configs @ f40bc01f1f00e32038e3aeeaa04dec600f9378f8, licenses ["Apache-2.0"], max_issues_count null, issue events null to null
max_forks_repo: tests/dicts/test_update.py in skippyprime/configs @ f40bc01f1f00e32038e3aeeaa04dec600f9378f8, licenses ["Apache-2.0"], max_forks_count null, fork events null to null
content:
```python
import pytest
from assertpy import assert_that
def test_update_with_mapping():
from figtree.dictconfig import DictConfiguration
conf = DictConfiguration({
'one': 'a'
})
conf['two.child.grandchild'] = 1
assert_that(conf).is_equal_to({
'one': 'a',
'two': {
'child': {
'grandchild': 1
}
}
})
assert_that(conf['two']).is_type_of(DictConfiguration)
def test_update_with_dict():
from figtree.dictconfig import DictConfiguration
conf = DictConfiguration({
'one': 'a'
})
conf['two.child.grandchild'] = {}
conf['two.child.grandchild.first'] = 1
assert_that(conf).is_equal_to({
'one': 'a',
'two': {
'child': {
'grandchild': {
'first': 1
}
}
}
})
assert_that(conf['two']).is_type_of(DictConfiguration)
def test_update_overwrite_with_mapping():
from figtree.dictconfig import DictConfiguration
conf = DictConfiguration({
'one': 'a',
'two': 'b'
})
conf['two.child.grandchild'] = 1
assert_that(conf).is_equal_to({
'one': 'a',
'two': {
'child': {
'grandchild': 1
}
}
})
assert_that(conf['two']).is_type_of(DictConfiguration)
def test_delete_key():
from figtree.dictconfig import DictConfiguration
conf = DictConfiguration({
'one': 1,
'two': 2
})
# delete child of None type
del conf['one']
assert_that(conf).is_equal_to({
'two': 2
})
@pytest.mark.xfail(raises=KeyError)
def test_delete_on_none():
from figtree.dictconfig import DictConfiguration
conf = DictConfiguration({
'one': None
})
# delete child of None type
del conf['one.noexist']
@pytest.mark.xfail(raises=KeyError)
def test_delete_on_non_mapping():
from figtree.dictconfig import DictConfiguration
conf = DictConfiguration({
'one': 'a'
})
# delete child of non-mapping type
del conf['one.noexist']
@pytest.mark.xfail(raises=KeyError)
def test_delete_on_none_leaf():
from figtree.dictconfig import DictConfiguration
conf = DictConfiguration({
'one': None
})
# delete child of None type
del conf['one.noexist.reallynotthere']
@pytest.mark.xfail(raises=KeyError)
def test_delete_on_non_mapping_leaf():
from figtree.dictconfig import DictConfiguration
conf = DictConfiguration({
'one': 'a'
})
# delete child of non-mapping type
del conf['one.noexist.reallynotthere']
```
avg_line_length: 19.573529 | max_line_length: 58 | alphanum_fraction: 0.592412
qsc_*_quality_signal columns (in schema order): 284 | 2,662 | 5.376761 | 0.15493 | 0.05239 | 0.11002 | 0.141454 | 0.937132 | 0.937132 | 0.903733 | 0.903733 | 0.820563 | 0.815324 | 0 | 0.004734 | 0.285875 | 2,662 | 135 | 59 | 19.718519 | 0.798527 | 0.053719 | 0 | 0.730337 | 0 | 0 | 0.112216 | 0.031039 | 0 | 0 | 0 | 0 | 0.089888 | 1 | 0.089888 | false | 0 | 0.11236 | 0 | 0.202247 | 0
qsc_* columns (in schema order): 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
effective: 0 | hits: 7

hexsha: 59686280b98b79a6acd0fcd8b621b2188e8b59ca | size: 32 | ext: py | lang: Python
max_stars_repo: src/my_package/my_package/submodule_c/c.py in DarkTrick/python_project_template @ 80657bcc34c53c3e70f4166408d855de494473e3, licenses ["CC0-1.0"], max_stars_count null, star events null to null
max_issues_repo: src/my_package/my_package/submodule_c/c.py in DarkTrick/python_project_template @ 80657bcc34c53c3e70f4166408d855de494473e3, licenses ["CC0-1.0"], max_issues_count null, issue events null to null
max_forks_repo: src/my_package/my_package/submodule_c/c.py in DarkTrick/python_project_template @ 80657bcc34c53c3e70f4166408d855de494473e3, licenses ["CC0-1.0"], max_forks_count null, fork events null to null
content:
```python
def c():
return "c was called"
```
avg_line_length: 16 | max_line_length: 23 | alphanum_fraction: 0.625
qsc_*_quality_signal columns (in schema order): 6 | 32 | 3.333333 | 0.833333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.21875 | 32 | 2 | 23 | 16 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0.363636 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | true | 0 | 0 | 0.5 | 1 | 0
qsc_* columns (in schema order): 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0
effective: 0 | hits: 7

hexsha: 59d04f49cf5bbc7594eb37bd3f9e618dc4a771da | size: 38 | ext: py | lang: Python
max_stars_repo: src/lib/atexit.py in DTenore/skulpt @ 098d20acfb088d6db85535132c324b7ac2f2d212, licenses ["MIT"], max_stars_count 2,671, star events 2015-01-03T08:23:25.000Z to 2022-03-31T06:15:48.000Z
max_issues_repo: src/lib/atexit.py in wakeupmuyunhe/skulpt @ a8fb11a80fb6d7c016bab5dfe3712517a350b347, licenses ["MIT"], max_issues_count 972, issue events 2015-01-05T08:11:00.000Z to 2022-03-29T13:47:15.000Z
max_forks_repo: src/lib/atexit.py in wakeupmuyunhe/skulpt @ a8fb11a80fb6d7c016bab5dfe3712517a350b347, licenses ["MIT"], max_forks_count 845, fork events 2015-01-03T19:53:36.000Z to 2022-03-29T18:34:22.000Z
content:
```python
import _sk_fail; _sk_fail._("atexit")
```
avg_line_length: 19 | max_line_length: 37 | alphanum_fraction: 0.763158
qsc_*_quality_signal columns (in schema order): 6 | 38 | 4 | 0.666667 | 0.5 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.078947 | 38 | 1 | 38 | 38 | 0.685714 | 0 | 0 | 0 | 0 | 0 | 0.157895 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0
qsc_* columns (in schema order): 1 | 1 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0
effective: 0 | hits: 7

hexsha: ab72d7cd081e9fbef17a09d25a9c68d6eb207a41 | size: 105 | ext: py | lang: Python
max_stars_repo: weld-python/weld/grizzly/__init__.py in tustvold/weld @ dcbba9a45ae2a190b31badec530ea54a58437606, licenses ["BSD-3-Clause"], max_stars_count 2,912, star events 2017-03-16T19:32:54.000Z to 2022-03-30T09:03:11.000Z
max_issues_repo: weld-python/weld/grizzly/__init__.py in QiangHeisenberg/weld @ 0926f84f6f4361e40842fcd6e00b7afdcc10a87f, licenses ["BSD-3-Clause"], max_issues_count 285, issue events 2017-03-16T18:01:00.000Z to 2021-08-12T10:58:23.000Z
max_forks_repo: weld-python/weld/grizzly/__init__.py in QiangHeisenberg/weld @ 0926f84f6f4361e40842fcd6e00b7afdcc10a87f, licenses ["BSD-3-Clause"], max_forks_count 272, fork events 2017-03-17T06:28:58.000Z to 2022-02-24T04:22:02.000Z
content:
```python
from weld.grizzly.core.frame import GrizzlyDataFrame
from weld.grizzly.core.series import GrizzlySeries
```
avg_line_length: 26.25 | max_line_length: 52 | alphanum_fraction: 0.857143
qsc_*_quality_signal columns (in schema order): 14 | 105 | 6.428571 | 0.642857 | 0.177778 | 0.333333 | 0.422222 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.085714 | 105 | 3 | 53 | 35 | 0.9375 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0
qsc_* columns (in schema order): 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0
effective: 0 | hits: 7

hexsha: abcf7c152d610e462501c5e2a1cc4d297a418a7c | size: 53 | ext: py | lang: Python
max_stars_repo: Contests/Wannafly/day5/2.py in DCTewi/My-Codes @ 9904f8057ec96e21cbc8cf9c62a49658a0f6d392, licenses ["MIT"], max_stars_count null, star events null to null
max_issues_repo: Contests/Wannafly/day5/2.py in DCTewi/My-Codes @ 9904f8057ec96e21cbc8cf9c62a49658a0f6d392, licenses ["MIT"], max_issues_count null, issue events null to null
max_forks_repo: Contests/Wannafly/day5/2.py in DCTewi/My-Codes @ 9904f8057ec96e21cbc8cf9c62a49658a0f6d392, licenses ["MIT"], max_forks_count null, fork events null to null
content:
```python
import math
print(1/math.sqrt(3))
print(math.sqrt(3))
```
avg_line_length: 17.666667 | max_line_length: 21 | alphanum_fraction: 0.735849
qsc_*_quality_signal columns (in schema order): 11 | 53 | 3.545455 | 0.545455 | 0.410256 | 0.461538 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.06 | 0.056604 | 53 | 3 | 22 | 17.666667 | 0.72 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.333333 | 0 | 0.333333 | 0.666667
qsc_* columns (in schema order): 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1
effective: 0 | hits: 7

hexsha: 2802009c39b331c19da5878fb22adac45b00c88e | size: 19,220 | ext: py | lang: Python
max_stars_repo: tests/makeOverallTextAnalysis.py in ttm/gmaneLegacy @ df9e0926c93358abdb632f7f9ce891330959c835, licenses ["Unlicense"], max_stars_count 1, star events 2019-04-25T16:31:12.000Z to 2019-04-25T16:31:12.000Z
max_issues_repo: tests/makeOverallTextAnalysis.py in ttm/gmaneLegacy @ df9e0926c93358abdb632f7f9ce891330959c835, licenses ["Unlicense"], max_issues_count null, issue events null to null
max_forks_repo: tests/makeOverallTextAnalysis.py in ttm/gmaneLegacy @ df9e0926c93358abdb632f7f9ce891330959c835, licenses ["Unlicense"], max_forks_count null, fork events null to null
content:
```python
import os
import gmaneLegacy as g
TDIR="/home/r/repos/artigoTextoNasRedes3/tables/SI2/"
files=os.listdir(TDIR)
files = [i for i in files if 'wnPOSInline2-n-' in i and 'tag' not in i]
adict = {1:{}, 2:{}, 3:{}}
for afile in files:
text = open(TDIR+afile,'r').read().replace('\\hline\\hline\\hline',
'\\hline').replace('\\hline\\hline',
'\\hline').split('\\\\\\hline')[3:-2]
# t = [i[1:-1] for i in text if ' total' not in i]
t = [i[1:-1] for i in text]
t_ = [i.split(' & ') for i in t]
matrix = [[float(i) for i in j[2:]] for j in t_]
names = [i[0] for i in t_]
layer = 1
for array, name in zip(matrix, names):
if ' total' in name:
layer += 1
continue
if min(array) == 0:
continue
if max(array) < 5:
low_measure = 1
elif max(array) < 10:
low_measure = 2
else:
low_measure = 0
if max(array)/min(array) > 1.5:
difference = 2
elif max(array)/min(array) > 1.1:
difference = 1
else:
difference = 0
array_ = sorted(array)
a = array
if sum([i==j for i,j in zip(array, array_)]) == 3:
order = 'h'
elif sum([i==j for i,j in zip([a[2], a[0], a[1]], array_[::-1])]) == 3:
order = 'hp'
elif sum([i==j for i,j in zip(array, array_[::-1])]) == 3:
order = 'p'
elif sum([i==j for i,j in zip([a[0], a[2], a[1]], array_[::-1])]) == 3:
order = 'ph'
elif sum([i==j for i,j in zip([a[1], a[0], a[2]], array_[::-1])]) == 3:
order = 'ip'
elif sum([i==j for i,j in zip([a[1], a[2], a[0]], array_[::-1])]) == 3:
order = 'ih'
print(array)
print(name, difference, low_measure, order)
if difference == 0:
order = '-'
elif difference == 2:
order += '*'
if low_measure == 2:
order += '_'
elif low_measure == 1:
order += '__'
print(order)
if name in adict[layer]:
adict[layer][name].append(order)
else:
adict[layer][name] = [order]
adict_ = {1:{}, 2:{}, 3:{}}
adict2 = {1:{}, 2:{}, 3:{}}
adict3 = {1:{}, 2:{}, 3:{}}
adict4 = {1:{}, 2:{}, 3:{}}
adict5 = {1:{}, 2:{}, 3:{}}
adict6 = {1:{}, 2:{}, 3:{}}
print('\n\n\n =========================')
for layer in adict:
for synset in adict[layer]:
measures = adict[layer][synset]
per = sum([i[0]=='p' for i in measures])
inte = sum([i[0]=='i' for i in measures])
hubs = sum([i[0]=='h' for i in measures])
peaks = sum(['hp' in i or 'ph' in i for i in measures])+inte
adict_[layer][synset] = ((per, inte, hubs), peaks)
measures = [i for i in measures if '__' not in i]
per = sum([i[0]=='p' for i in measures])
inte = sum([i[0]=='i' for i in measures])
hubs = sum([i[0]=='h' for i in measures])
peaks = sum(['hp' in i or 'ph' in i for i in measures])+inte
adict2[layer][synset] = ((per, inte, hubs), peaks)
measures_ = [i for i in measures if '_' not in i]
per = sum([i[0]=='p' for i in measures_])
inte = sum([i[0]=='i' for i in measures_])
hubs = sum([i[0]=='h' for i in measures_])
peaks_ = sum(['hp' in i or 'ph' in i for i in measures_])+inte
adict3[layer][synset] = ((per, inte, hubs), peaks_)
total = [0, 0, 0]
for measure in measures:
val = 1
if '_' in measure:
val *=.5
print('times .5 ===========<')
if '*' in measure:
val *= 2
if measure[0] == 'p':
total[0] += val
elif measure[0] == 'i':
total[1] += val
elif measure[0] == 'h':
total[2] += val
adict4[layer][synset] = (total, adict2[layer][synset][0], (peaks, len(measures)))
adict5[layer][synset] = (total, adict2[layer][synset][0],
(peaks, len(measures)), measures)
if len(measures) >= 13:
adict6[layer][synset] = (total, adict2[layer][synset][0], (peaks, len(measures)))
labelsh = ['synset', 'p.', 'i.', 'h', 'peaks', 'total', 'depth']
labels = []
data = []
for layer in adict6:
for entry in adict6[layer]:
labels.append(entry)
data_ = adict6[layer][entry]
data.append([*data_[1], *data_[2], layer])
fname_ = TDIR+'finalSynsetsN.tex'
caption = 'Wordnet synsets from nouns in each Erd\\"os sector.'
g.tableHelpers.lTable(labels,labelsh,data,caption,fname_,two_decimal=False)
ME=g.tableHelpers.me
ME(fname_[:-4],"\\bf",[(0,i) for i in range(0,7)])
DL=g.tableHelpers.dl
DL(fname_[:-4]+"_",[1],[1,4],[2,4,5,6,7,8,10,11,12,13,14,15])
################################################## AS
files=os.listdir(TDIR)
files = [i for i in files if 'wnPOSInline2-as-' in i and 'tag' not in i]
adict = {1:{}, 2:{}, 3:{}}
for afile in files:
text = open(TDIR+afile,'r').read().replace('\\hline\\hline\\hline',
'\\hline').replace('\\hline\\hline',
'\\hline').split('\\\\\\hline')[3:-2]
# t = [i[1:-1] for i in text if ' total' not in i]
t = [i[1:-1] for i in text]
t_ = [i.split(' & ') for i in t]
matrix = [[float(i) for i in j[2:]] for j in t_]
names = [i[0] for i in t_]
layer = 1
for array, name in zip(matrix, names):
if ' total' in name:
layer += 1
continue
if min(array) == 0:
continue
if max(array) < 5:
low_measure = 1
elif max(array) < 10:
low_measure = 2
else:
low_measure = 0
if max(array)/min(array) > 1.5:
difference = 2
elif max(array)/min(array) > 1.1:
difference = 1
else:
difference = 0
array_ = sorted(array)
a = array
if sum([i==j for i,j in zip(array, array_)]) == 3:
order = 'h'
elif sum([i==j for i,j in zip([a[2], a[0], a[1]], array_[::-1])]) == 3:
order = 'hp'
elif sum([i==j for i,j in zip(array, array_[::-1])]) == 3:
order = 'p'
elif sum([i==j for i,j in zip([a[0], a[2], a[1]], array_[::-1])]) == 3:
order = 'ph'
elif sum([i==j for i,j in zip([a[1], a[0], a[2]], array_[::-1])]) == 3:
order = 'ip'
elif sum([i==j for i,j in zip([a[1], a[2], a[0]], array_[::-1])]) == 3:
order = 'ih'
print(array)
print(name, difference, low_measure, order)
if difference == 0:
order = '-'
elif difference == 2:
order += '*'
if low_measure == 2:
order += '_'
elif low_measure == 1:
order += '__'
print(order)
if name in adict[layer]:
adict[layer][name].append(order)
else:
adict[layer][name] = [order]
adict_ = {1:{}, 2:{}, 3:{}}
adict2 = {1:{}, 2:{}, 3:{}}
adict3 = {1:{}, 2:{}, 3:{}}
adict4 = {1:{}, 2:{}, 3:{}}
adict5 = {1:{}, 2:{}, 3:{}}
adict6 = {1:{}, 2:{}, 3:{}}
print('\n\n\n =========================')
for layer in adict:
for synset in adict[layer]:
measures = adict[layer][synset]
per = sum([i[0]=='p' for i in measures])
inte = sum([i[0]=='i' for i in measures])
hubs = sum([i[0]=='h' for i in measures])
peaks = sum(['hp' in i or 'ph' in i for i in measures])+inte
adict_[layer][synset] = ((per, inte, hubs), peaks)
measures = [i for i in measures if '__' not in i]
per = sum([i[0]=='p' for i in measures])
inte = sum([i[0]=='i' for i in measures])
hubs = sum([i[0]=='h' for i in measures])
peaks = sum(['hp' in i or 'ph' in i for i in measures])+inte
adict2[layer][synset] = ((per, inte, hubs), peaks)
measures_ = [i for i in measures if '_' not in i]
per = sum([i[0]=='p' for i in measures_])
inte = sum([i[0]=='i' for i in measures_])
hubs = sum([i[0]=='h' for i in measures_])
peaks_ = sum(['hp' in i or 'ph' in i for i in measures_])+inte
adict3[layer][synset] = ((per, inte, hubs), peaks_)
total = [0, 0, 0]
for measure in measures:
val = 1
if '_' in measure:
val *=.5
print('times .5 ===========<')
if '*' in measure:
val *= 2
if measure[0] == 'p':
total[0] += val
elif measure[0] == 'i':
total[1] += val
elif measure[0] == 'h':
total[2] += val
adict4[layer][synset] = (total, adict2[layer][synset][0], (peaks, len(measures)))
adict5[layer][synset] = (total, adict2[layer][synset][0],
(peaks, len(measures)), measures)
if len(measures) >= 9:
adict6[layer][synset] = (total, adict2[layer][synset][0], (peaks, len(measures)))
labelsh = ['synset', 'p.', 'i.', 'h', 'peaks', 'total', 'depth']
labels = []
data = []
for layer in adict6:
for entry in adict6[layer]:
labels.append(entry)
data_ = adict6[layer][entry]
data.append([*data_[1], *data_[2], layer])
fname_ = TDIR+'finalSynsetsAS.tex'
caption = 'Wordnet synsets from adjectives in each Erd\\"os sector.'
g.tableHelpers.lTable(labels,labelsh,data,caption,fname_,two_decimal=False)
ME=g.tableHelpers.me
ME(fname_[:-4],"\\bf",[(0,i) for i in range(0,7)])
DL=g.tableHelpers.dl
DL(fname_[:-4]+"_",[1],[1,4],[2])
############################### V
files=os.listdir(TDIR)
files = [i for i in files if 'wnPOSInline2-v-' in i and 'tag' not in i]
adict = {1:{}, 2:{}, 3:{}, 4:{}}
for afile in files:
text = open(TDIR+afile,'r').read().replace('\\hline\\hline\\hline',
'\\hline').replace('\\hline\\hline',
'\\hline').split('\\\\\\hline')[3:-2]
# t = [i[1:-1] for i in text if ' total' not in i]
t = [i[1:-1] for i in text]
t_ = [i.split(' & ') for i in t]
matrix = [[float(i) for i in j[2:]] for j in t_]
names = [i[0] for i in t_]
layer = 1
for array, name in zip(matrix, names):
if ' total' in name:
layer += 1
continue
if min(array) == 0:
continue
if max(array) < 5:
low_measure = 1
elif max(array) < 10:
low_measure = 2
else:
low_measure = 0
if max(array)/min(array) > 1.5:
difference = 2
elif max(array)/min(array) > 1.1:
difference = 1
else:
difference = 0
array_ = sorted(array)
a = array
if sum([i==j for i,j in zip(array, array_)]) == 3:
order = 'h'
elif sum([i==j for i,j in zip([a[2], a[0], a[1]], array_[::-1])]) == 3:
order = 'hp'
elif sum([i==j for i,j in zip(array, array_[::-1])]) == 3:
order = 'p'
elif sum([i==j for i,j in zip([a[0], a[2], a[1]], array_[::-1])]) == 3:
order = 'ph'
elif sum([i==j for i,j in zip([a[1], a[0], a[2]], array_[::-1])]) == 3:
order = 'ip'
elif sum([i==j for i,j in zip([a[1], a[2], a[0]], array_[::-1])]) == 3:
order = 'ih'
print(array)
print(name, difference, low_measure, order)
if difference == 0:
order = '-'
elif difference == 2:
order += '*'
if low_measure == 2:
order += '_'
elif low_measure == 1:
order += '__'
print(order)
if name in adict[layer]:
adict[layer][name].append(order)
else:
adict[layer][name] = [order]
adict_ = {1:{}, 2:{}, 3:{}, 4:{}}
adict2 = {1:{}, 2:{}, 3:{}, 4:{}}
adict3 = {1:{}, 2:{}, 3:{}, 4:{}}
adict4 = {1:{}, 2:{}, 3:{}, 4:{}}
adict5 = {1:{}, 2:{}, 3:{}, 4:{}}
adict6 = {1:{}, 2:{}, 3:{}, 4:{}}
print('\n\n\n =========================')
for layer in adict:
for synset in adict[layer]:
measures = adict[layer][synset]
per = sum([i[0]=='p' for i in measures])
inte = sum([i[0]=='i' for i in measures])
hubs = sum([i[0]=='h' for i in measures])
peaks = sum(['hp' in i or 'ph' in i for i in measures])+inte
adict_[layer][synset] = ((per, inte, hubs), peaks)
measures = [i for i in measures if '__' not in i]
per = sum([i[0]=='p' for i in measures])
inte = sum([i[0]=='i' for i in measures])
hubs = sum([i[0]=='h' for i in measures])
peaks = sum(['hp' in i or 'ph' in i for i in measures])+inte
adict2[layer][synset] = ((per, inte, hubs), peaks)
measures_ = [i for i in measures if '_' not in i]
per = sum([i[0]=='p' for i in measures_])
inte = sum([i[0]=='i' for i in measures_])
hubs = sum([i[0]=='h' for i in measures_])
peaks_ = sum(['hp' in i or 'ph' in i for i in measures_])+inte
adict3[layer][synset] = ((per, inte, hubs), peaks_)
total = [0, 0, 0]
for measure in measures:
val = 1
if '_' in measure:
val *=.5
print('times .5 ===========<')
if '*' in measure:
val *= 2
if measure[0] == 'p':
total[0] += val
elif measure[0] == 'i':
total[1] += val
elif measure[0] == 'h':
total[2] += val
adict4[layer][synset] = (total, adict2[layer][synset][0], (peaks, len(measures)))
adict5[layer][synset] = (total, adict2[layer][synset][0],
(peaks, len(measures)), measures)
if len(measures) >= 13:
adict6[layer][synset] = (total, adict2[layer][synset][0], (peaks, len(measures)))
labelsh = ['synset', 'p.', 'i.', 'h', 'peaks', 'total', 'depth']
labels = []
data = []
for layer in adict6:
for entry in adict6[layer]:
labels.append(entry)
data_ = adict6[layer][entry]
data.append([*data_[1], *data_[2], layer])
fname_ = TDIR+'finalSynsetsV.tex'
caption = 'Wordnet synsets from verbs in each Erd\\"os sector.'
g.tableHelpers.lTable(labels,labelsh,data,caption,fname_,two_decimal=False)
ME=g.tableHelpers.me
ME(fname_[:-4],"\\bf",[(0,i) for i in range(0,7)])
DL=g.tableHelpers.dl
DL(fname_[:-4]+"_",[1],[1,4],[2,3,4,5,6,8,9,10,11,13,14,15,17,18])
################################## R
files=os.listdir(TDIR)
files = [i for i in files if 'wnPOSInline2-r-' in i and 'tag' not in i]
adict = {1:{}, 2:{}, 3:{}}
for afile in files:
text = open(TDIR+afile,'r').read().replace('\\hline\\hline\\hline',
'\\hline').replace('\\hline\\hline',
'\\hline').split('\\\\\\hline')[3:-2]
# t = [i[1:-1] for i in text if ' total' not in i]
t = [i[1:-1] for i in text]
t_ = [i.split(' & ') for i in t]
matrix = [[float(i) for i in j[2:]] for j in t_]
names = [i[0] for i in t_]
layer = 1
for array, name in zip(matrix, names):
if ' total' in name:
layer += 1
continue
if min(array) == 0:
continue
if max(array) < 5:
low_measure = 1
elif max(array) < 10:
low_measure = 2
else:
low_measure = 0
if max(array)/min(array) > 1.5:
difference = 2
elif max(array)/min(array) > 1.1:
difference = 1
else:
difference = 0
array_ = sorted(array)
a = array
if sum([i==j for i,j in zip(array, array_)]) == 3:
order = 'h'
elif sum([i==j for i,j in zip([a[2], a[0], a[1]], array_[::-1])]) == 3:
order = 'hp'
elif sum([i==j for i,j in zip(array, array_[::-1])]) == 3:
order = 'p'
elif sum([i==j for i,j in zip([a[0], a[2], a[1]], array_[::-1])]) == 3:
order = 'ph'
elif sum([i==j for i,j in zip([a[1], a[0], a[2]], array_[::-1])]) == 3:
order = 'ip'
elif sum([i==j for i,j in zip([a[1], a[2], a[0]], array_[::-1])]) == 3:
order = 'ih'
print(array)
print(name, difference, low_measure, order)
if difference == 0:
order = '-'
elif difference == 2:
order += '*'
if low_measure == 2:
order += '_'
elif low_measure == 1:
order += '__'
print(order)
if name in adict[layer]:
adict[layer][name].append(order)
else:
adict[layer][name] = [order]
adict_ = {1:{}, 2:{}, 3:{}}
adict2 = {1:{}, 2:{}, 3:{}}
adict3 = {1:{}, 2:{}, 3:{}}
adict4 = {1:{}, 2:{}, 3:{}}
adict5 = {1:{}, 2:{}, 3:{}}
adict6 = {1:{}, 2:{}, 3:{}}
print('\n\n\n =========================')
for layer in adict:
for synset in adict[layer]:
measures = adict[layer][synset]
per = sum([i[0]=='p' for i in measures])
inte = sum([i[0]=='i' for i in measures])
hubs = sum([i[0]=='h' for i in measures])
peaks = sum(['hp' in i or 'ph' in i for i in measures])+inte
adict_[layer][synset] = ((per, inte, hubs), peaks)
measures = [i for i in measures if '__' not in i]
per = sum([i[0]=='p' for i in measures])
inte = sum([i[0]=='i' for i in measures])
hubs = sum([i[0]=='h' for i in measures])
peaks = sum(['hp' in i or 'ph' in i for i in measures])+inte
adict2[layer][synset] = ((per, inte, hubs), peaks)
measures_ = [i for i in measures if '_' not in i]
per = sum([i[0]=='p' for i in measures_])
inte = sum([i[0]=='i' for i in measures_])
hubs = sum([i[0]=='h' for i in measures_])
peaks_ = sum(['hp' in i or 'ph' in i for i in measures_])+inte
adict3[layer][synset] = ((per, inte, hubs), peaks_)
total = [0, 0, 0]
for measure in measures:
val = 1
if '_' in measure:
val *=.5
print('times .5 ===========<')
if '*' in measure:
val *= 2
if measure[0] == 'p':
total[0] += val
elif measure[0] == 'i':
total[1] += val
elif measure[0] == 'h':
total[2] += val
adict4[layer][synset] = (total, adict2[layer][synset][0], (peaks, len(measures)))
adict5[layer][synset] = (total, adict2[layer][synset][0],
(peaks, len(measures)), measures)
if len(measures) >= 13:
adict6[layer][synset] = (total, adict2[layer][synset][0], (peaks, len(measures)))
labelsh = ['synset', 'p.', 'i.', 'h', 'peaks', 'total', 'depth']
labels = []
data = []
for layer in adict6:
for entry in adict6[layer]:
labels.append(entry)
data_ = adict6[layer][entry]
data.append([*data_[1], *data_[2], layer])
fname_ = TDIR+'finalSynsetsR.tex'
caption = 'Wordnet synsets from adverbs in each Erd\\"os sector.'
g.tableHelpers.lTable(labels,labelsh,data,caption,fname_,two_decimal=False)
ME=g.tableHelpers.me
ME(fname_[:-4],"\\bf",[(0,i) for i in range(0,7)])
DL=g.tableHelpers.dl
DL(fname_[:-4]+"_",[1],[1,4],[2,3,4,5])
```
avg_line_length: 36.332703 | max_line_length: 93 | alphanum_fraction: 0.476275
qsc_*_quality_signal columns (in schema order): 2,782 | 19,220 | 3.231488 | 0.045291 | 0.048053 | 0.056062 | 0.087208 | 0.972748 | 0.960067 | 0.960067 | 0.960067 | 0.960067 | 0.960067 | 0 | 0.04232 | 0.316441 | 19,220 | 528 | 94 | 36.401515 | 0.641955 | 0.01051 | 0 | 0.944099 | 0 | 0 | 0.063029 | 0.012172 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.004141 | 0 | 0.004141 | 0.041408
qsc_* columns (in schema order): 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
effective: 0 | hits: 7

hexsha: e6299b6cdf9f0f48473b770cb71341f2acaf6702 | size: 36,478 | ext: py | lang: Python
max_stars_repo: brownie/tests/test_exchange.py in SuperZooper3/AstroSwap @ 86441eb099c8b518441cd9bb58bf8ac75e5709c1, licenses ["MIT"], max_stars_count 1, star events 2022-02-10T05:38:20.000Z to 2022-02-10T05:38:20.000Z
max_issues_repo: brownie/tests/test_exchange.py in SuperZooper3/AstroSwap @ 86441eb099c8b518441cd9bb58bf8ac75e5709c1, licenses ["MIT"], max_issues_count null, issue events null to null
max_forks_repo: brownie/tests/test_exchange.py in SuperZooper3/AstroSwap @ 86441eb099c8b518441cd9bb58bf8ac75e5709c1, licenses ["MIT"], max_forks_count 1, fork events 2022-02-18T13:08:58.000Z to 2022-02-18T13:08:58.000Z
content (truncated below):
from scripts.helpers import smart_get_account, LOCAL_BLOCKCHAIN_ENVIRONMENTS, calculate_liquidity_pool_output
from scripts.runAstroSwap import deploy_erc20 , exchange_from_address
from brownie import network, accounts, config, AstroSwapExchange, AstroSwapFactory
import pytest
from random import randint
# All the exchange tests
# - Deploy an exchange contract
# - Seed invest properly
# - Seed invest just tokens (should fail)
# - Seed invest just ETH (should fail)
# - Seed invest nothing (not going to fail since it's just dumb to do)
# - Seed invest without authorising the ERC20 transfer (should fail)
# - Invest properly
# - Invest just ETH throught not authorising (should fail)
# - Invest nothing (should fail)
# - Invest half as much as what was seeded
# - Invest 2x as much as what was seeded
# - Invest while only authroising half of the ERC20 transfer (should fail)
# - Try calling ethToTokenPrivate (should fail)
# - Try calling tokenToEthPrivate (should fail)
# - Call ethToToken with a 0 min
# - Call ethToToken with min > expected (should fail)
# - Call ethToToken with min < expected
# - Try calling ethToToken without seeding (should fail)
# - Call tokenToEth with a 0 min
# - Call tokenToEth with min > expected (should fail)
# - Call tokenToEth with min < expected
# - Try calling tokenToEth without seeding (should fail)
# - Compare EthToTokenQuote to actual cost (3 different values)
# - Compare TokenToEthQuote to actual cost (3 different values)
# - Divest everything properly
# - Divest half as much as what was invested
# - Divest more than was invested (should fail)
# - Exchange tokenToToken properly
# - Exchange tokenToToken with a min > expected (should fail)
# - Exchange tokenToToken without seeding anything (should fail)
# - Exchange tokenToToken while only seeding from contract (should fail)
# - Exchange tokenToToken while only seeding to account (should fail)
# - Compare tokenToTokenQuote to actual cost (3 different values + 1 random one)
# - Compare investQuoteFromEth to actual cost (3 different values)
# - Compare investQuoteFromToken to actual cost (3 different values)
def setupExchange(factory, token1, token2, account):
exchangeForge1 = factory.addTokenExchange(token1.address, {'from': account})
exchangeForge1.wait(1)
exchangeAddress1 = exchangeForge1.events["TokenExchangeAdded"][0]["tokenExchange"]
exchange1 = exchange_from_address(exchangeAddress1)
exchangeForge2 = factory.addTokenExchange(token2.address, {'from': account})
exchangeForge2.wait(1)
exchangeAddress2 = exchangeForge2.events["TokenExchangeAdded"][0]["tokenExchange"]
exchange2 = exchange_from_address(exchangeAddress2)
return exchange1, exchange2
fee = 400
def test_exchange_deploy():
# Arrange
token = deploy_erc20()
account = smart_get_account(1)
# Act
exchange = AstroSwapExchange.deploy(
token.address,
fee,
{'from': account},
publish_source = config["networks"][network.show_active()].get("verify", False)
)
print("AstroSwapExchage@", exchange)
# Assert
assert AstroSwapExchange[-1] != "0x0000000000000000000000000000000000000000"
def test_exchange_seed_invest_properly():
# Going with an seed rate of 100 tokens for 1 ETH
# Arrange
token = deploy_erc20()
account = smart_get_account(1)
exchange = AstroSwapExchange.deploy(
token.address,
fee,
{'from': account},
publish_source = config["networks"][network.show_active()].get("verify", False)
)
approveTx = token.approve(exchange.address, 100*10**18, {'from': account})
approveTx.wait(1)
# Act
seedTx = exchange.seedInvest(100*10**18, {'from': account, 'value': 1*10**18})
seedTx.wait(1)
# Assert
assert seedTx.events["Investment"]["sharesPurchased"] == 10000
print(exchange.ethPool())
assert exchange.ethPool() == 1*10**18
assert exchange.tokenPool() == 100*10**18
assert exchange.totalShares() == 10000
assert exchange.invariant() == exchange.ethPool() * exchange.tokenPool()
def test_exchange_seed_invest_just_tokens():
# Arrange
token = deploy_erc20()
account = smart_get_account(1)
exchange = AstroSwapExchange.deploy(
token.address,
fee,
{'from': account},
publish_source = config["networks"][network.show_active()].get("verify", False)
)
approveTx = token.approve(exchange.address, 100*10**18, {'from': account})
approveTx.wait(1)
# Act & Assert
with pytest.raises(Exception):
seedTx = exchange.seedInvest(100*10**18, {'from': account, 'value': 0})
seedTx.wait(1)
def test_exchange_seed_invest_just_eth():
# Arrange
token = deploy_erc20()
account = smart_get_account(1)
exchange = AstroSwapExchange.deploy(
token.address,
fee,
{'from': account},
publish_source = config["networks"][network.show_active()].get("verify", False)
)
# Act & Assert
with pytest.raises(Exception):
seedTx = exchange.seedInvest(0, {'from': account, 'value': 1*10**18})
seedTx.wait(1)
def test_exchange_seed_invest_nothing():
# Arrange
token = deploy_erc20()
account = smart_get_account(1)
exchange = AstroSwapExchange.deploy(
token.address,
fee,
{'from': account},
publish_source = config["networks"][network.show_active()].get("verify", False)
)
# Act & Assert
with pytest.raises(Exception):
seedTx = exchange.seedInvest(0, {'from': account, 'value': 0})
seedTx.wait(1)
def test_exchange_seed_invest_without_authorising_erc20_transfer():
# Arrange
token = deploy_erc20()
account = smart_get_account(1)
exchange = AstroSwapExchange.deploy(
token.address,
fee,
{'from': account},
publish_source = config["networks"][network.show_active()].get("verify", False)
)
# Act & Assert
with pytest.raises(Exception):
seedTx = exchange.seedInvest(100*10**18, {'from': account, 'value': 1*10**18})
seedTx.wait(1)
def test_exchange_invest_properly():
# Arrange
token = deploy_erc20()
account = smart_get_account(1)
exchange = AstroSwapExchange.deploy(
token.address,
fee,
{'from': account},
publish_source = config["networks"][network.show_active()].get("verify", False)
)
approveTx = token.approve(exchange.address, 200*10**18, {'from': account})
approveTx.wait(1)
seedTx = exchange.seedInvest(100*10**18, {'from': account, 'value': 1*10**18})
seedTx.wait(1)
# Act
investTx = exchange.invest(10**22, {'from': account, 'value': 1*10**18})
investTx.wait(1)
# Assert
assert investTx.events["Investment"]["sharesPurchased"] == 10000
assert exchange.ethPool() == 2*10**18
assert exchange.tokenPool() == 200*10**18
assert exchange.totalShares() == 20000
assert exchange.invariant() == exchange.ethPool() * exchange.tokenPool()
def test_exchange_invest_just_eth():
# Arrange
token = deploy_erc20()
account = smart_get_account(1)
exchange = AstroSwapExchange.deploy(
token.address,
fee,
{'from': account},
publish_source = config["networks"][network.show_active()].get("verify", False)
)
approveTx = token.approve(exchange.address, 100*10**18, {'from': account})
approveTx.wait(1)
seedTx = exchange.seedInvest(100*10**18, {'from': account, 'value': 1*10**18})
seedTx.wait(1)
# Act & Assert
with pytest.raises(Exception): # Fails because we only authorize transfer of tokens for the seeding
investTx = exchange.invest(10**22, {'from': account, 'value': 1*10**18})
investTx.wait(1)
def test_exchange_invest_nothing():
# Arrange
token = deploy_erc20()
account = smart_get_account(1)
exchange = AstroSwapExchange.deploy(
token.address,
fee,
{'from': account},
publish_source = config["networks"][network.show_active()].get("verify", False)
)
approveTx = token.approve(exchange.address, 200*10**18, {'from': account})
approveTx.wait(1)
seedTx = exchange.seedInvest(100*10**18, {'from': account, 'value': 1*10**18})
seedTx.wait(1)
# Act
investTx = exchange.invest(10**22, {'from': account, 'value': 0})
investTx.wait(1)
# Assert
assert investTx.events["Investment"]["sharesPurchased"] == 0
def test_exchange_invest_half():
# Arrange
token = deploy_erc20()
account = smart_get_account(1)
exchange = AstroSwapExchange.deploy(
token.address,
fee,
{'from': account},
publish_source = config["networks"][network.show_active()].get("verify", False)
)
approveTx = token.approve(exchange.address, 150*10**18, {'from': account})
approveTx.wait(1)
seedTx = exchange.seedInvest(100*10**18, {'from': account, 'value': 1*10**18})
seedTx.wait(1)
# Act
investTx = exchange.invest(10**22, {'from': account, 'value': 0.5*10**18})
investTx.wait(1)
# Assert
print(investTx.events)
assert investTx.events["Investment"]["sharesPurchased"] == 5000
assert exchange.ethPool() == 1.5*10**18
assert exchange.tokenPool() == 150*10**18
assert exchange.totalShares() == 15000
assert exchange.invariant() == exchange.ethPool() * exchange.tokenPool()
def test_exchange_invest_two_x():
# Arrange
token = deploy_erc20()
account = smart_get_account(1)
exchange = AstroSwapExchange.deploy(
token.address,
fee,
{'from': account},
publish_source = config["networks"][network.show_active()].get("verify", False)
)
approveTx = token.approve(exchange.address, 300*10**18, {'from': account})
approveTx.wait(1)
seedTx = exchange.seedInvest(100*10**18, {'from': account, 'value': 1*10**18})
seedTx.wait(1)
# Act
investTx = exchange.invest(10**22, {'from': account, 'value': 2*10**18})
investTx.wait(1)
# Assert
assert investTx.events["Investment"]["sharesPurchased"] == 20000
assert exchange.ethPool() == 3*10**18
assert exchange.tokenPool() == 300*10**18
assert exchange.totalShares() == 30000
assert exchange.invariant() == exchange.ethPool() * exchange.tokenPool()
def test_exchange_invest_maxToken_double():
# Arrange
token = deploy_erc20()
account = smart_get_account(1)
exchange = AstroSwapExchange.deploy(
token.address,
fee,
{'from': account},
publish_source = config["networks"][network.show_active()].get("verify", False)
)
approveTx = token.approve(exchange.address, 200*10**18, {'from': account})
approveTx.wait(1)
seedTx = exchange.seedInvest(100*10**18, {'from': account, 'value': 1*10**18})
seedTx.wait(1)
# Act
investTx = exchange.invest(200*10**18, {'from': account, 'value': 1*10**18})
investTx.wait(1)
# Assert
assert investTx.events["Investment"]["sharesPurchased"] == 10000
assert exchange.ethPool() == 2*10**18
assert exchange.tokenPool() == 200*10**18
assert exchange.totalShares() == 20000
assert exchange.invariant() == exchange.ethPool() * exchange.tokenPool()
def test_exchange_invest_maxToken_exact():
# Arrange
token = deploy_erc20()
account = smart_get_account(1)
exchange = AstroSwapExchange.deploy(
token.address,
fee,
{'from': account},
publish_source = config["networks"][network.show_active()].get("verify", False)
)
approveTx = token.approve(exchange.address, 200*10**18, {'from': account})
approveTx.wait(1)
seedTx = exchange.seedInvest(100*10**18, {'from': account, 'value': 1*10**18})
seedTx.wait(1)
# Act
investTx = exchange.invest(100*10**18, {'from': account, 'value': 1*10**18})
investTx.wait(1)
# Assert
assert investTx.events["Investment"]["sharesPurchased"] == 10000
assert exchange.ethPool() == 2*10**18
assert exchange.tokenPool() == 200*10**18
assert exchange.totalShares() == 20000
assert exchange.invariant() == exchange.ethPool() * exchange.tokenPool()
def test_exchange_invest_maxToken_less():
# Arrange
token = deploy_erc20()
account = smart_get_account(1)
exchange = AstroSwapExchange.deploy(
token.address,
fee,
{'from': account},
publish_source = config["networks"][network.show_active()].get("verify", False)
)
approveTx = token.approve(exchange.address, 200*10**18, {'from': account})
approveTx.wait(1)
seedTx = exchange.seedInvest(100*10**18, {'from': account, 'value': 1*10**18})
seedTx.wait(1)
# Act & Assert
with pytest.raises(Exception):
investTx = exchange.invest(99*10**18, {'from': account, 'value': 1*10**18})
investTx.wait(1)
def test_try_calling_privates():
# Arrange
token = deploy_erc20()
account = smart_get_account(1)
exchange = AstroSwapExchange.deploy(
token.address,
fee,
{'from': account},
publish_source = config["networks"][network.show_active()].get("verify", False)
)
approveTx = token.approve(exchange.address, 100*10**18, {'from': account})
approveTx.wait(1)
seedTx = exchange.seedInvest(100*10**18, {'from': account, 'value': 1*10**18})
seedTx.wait(1)
# Act & Assert
with pytest.raises(Exception):
exchange.ethToTokenPrivate(1*10**18, {'from': account})
with pytest.raises(Exception):
exchange.tokenToEthPrivate(1*10**18, {'from': account})
def test_call_ethToToken_min_0():
# Arrange
token = deploy_erc20()
account = smart_get_account(1)
exchange = AstroSwapExchange.deploy(
token.address,
fee,
{'from': account},
publish_source = config["networks"][network.show_active()].get("verify", False)
)
approveTx = token.approve(exchange.address, 100*10**18, {'from': account})
approveTx.wait(1)
seedTx = exchange.seedInvest(100*10**18, {'from': account, 'value': 1*10**18})
seedTx.wait(1)
beforeInvariant = exchange.invariant()
expectedReturn = calculate_liquidity_pool_output(exchange.ethPool(), exchange.tokenPool(), 0.1*10**18, fee)
# Act
ethToTokenTx = exchange.ethToToken(account.address, 0, {'from': account, 'value': 0.1*10**18})
ethToTokenTx.wait(1)
# Assert
assert abs(ethToTokenTx.events["TokenPurchase"]["tokensOut"] / expectedReturn - 1) < 0.0001 # I was having some jank in the math, was not able to find it. 0.01% error is fine.
assert exchange.invariant() >= beforeInvariant
def test_call_ethToToken_min_smaller():
# Arrange
token = deploy_erc20()
account = smart_get_account(1)
exchange = AstroSwapExchange.deploy(
token.address,
fee,
{'from': account},
publish_source = config["networks"][network.show_active()].get("verify", False)
)
approveTx = token.approve(exchange.address, 100*10**18, {'from': account})
approveTx.wait(1)
seedTx = exchange.seedInvest(100*10**18, {'from': account, 'value': 1*10**18})
seedTx.wait(1)
beforeInvariant = exchange.invariant()
expectedReturn = calculate_liquidity_pool_output(exchange.ethPool(), exchange.tokenPool(), 0.1*10**18, fee)
# Act
ethToTokenTx = exchange.ethToToken(account.address, expectedReturn * 0.95, {'from': account, 'value': 0.1*10**18})
# Above has 5% slippage
ethToTokenTx.wait(1)
# Assert
assert abs(ethToTokenTx.events["TokenPurchase"]["tokensOut"] / expectedReturn - 1) < 0.0001 # I was having some jank in the math, was not able to find it. 0.01% error is fine.
assert exchange.invariant() >= beforeInvariant
def test_call_ethToToken_min_larger():
# Arrange
token = deploy_erc20()
account = smart_get_account(1)
exchange = AstroSwapExchange.deploy(
token.address,
fee,
{'from': account},
publish_source = config["networks"][network.show_active()].get("verify", False)
)
approveTx = token.approve(exchange.address, 100*10**18, {'from': account})
approveTx.wait(1)
seedTx = exchange.seedInvest(100*10**18, {'from': account, 'value': 1*10**18})
seedTx.wait(1)
expectedReturn = calculate_liquidity_pool_output(exchange.ethPool(), exchange.tokenPool(), 0.1*10**18, fee)
# Act & Assert
with pytest.raises(Exception):
ethToTokenTx = exchange.ethToToken(account.address, expectedReturn * 1.05, {'from': account, 'value': 0.1*10**18})
ethToTokenTx.wait(1)
def test_call_ethToToken_no_seed():
# Arrange
token = deploy_erc20()
account = smart_get_account(1)
exchange = AstroSwapExchange.deploy(
token.address,
fee,
{'from': account},
publish_source = config["networks"][network.show_active()].get("verify", False)
)
# Act & Assert
with pytest.raises(Exception):
ethToTokenTx = exchange.ethToToken(account.address, 1*10**18, {'from': account, 'value': 0.1*10**18})
ethToTokenTx.wait(1)
def test_call_tokenToEth_min_0():
# Arrange
token = deploy_erc20()
account = smart_get_account(1)
exchange = AstroSwapExchange.deploy(
token.address,
fee,
{'from': account},
publish_source = config["networks"][network.show_active()].get("verify", False)
)
approveTx = token.approve(exchange.address, 100*10**18 + 2*10**18, {'from': account})
approveTx.wait(1)
seedTx = exchange.seedInvest(100*10**18, {'from': account, 'value': 1*10**18})
seedTx.wait(1)
beforeInvariant = exchange.invariant()
expectedReturn = calculate_liquidity_pool_output(exchange.tokenPool(), exchange.ethPool(), 2*10**18, fee)
# Act
tokenToEthTx = exchange.tokenToEth(account.address, 2*10**18, 0, {'from': account})
tokenToEthTx.wait(1)
# Assert
assert abs(tokenToEthTx.events["EthPurchase"]["ethOut"] / expectedReturn - 1) < 0.0001 # I was having some jank in the math, was not able to find it. 0.01% error is fine.
assert exchange.invariant() >= beforeInvariant
def test_call_tokenToEth_min_smaller():
# Arrange
token = deploy_erc20()
account = smart_get_account(1)
exchange = AstroSwapExchange.deploy(
token.address,
fee,
{'from': account},
publish_source = config["networks"][network.show_active()].get("verify", False)
)
approveTx = token.approve(exchange.address, 100*10**18 + 2*10**18, {'from': account})
approveTx.wait(1)
seedTx = exchange.seedInvest(100*10**18, {'from': account, 'value': 1*10**18})
seedTx.wait(1)
beforeInvariant = exchange.invariant()
expectedReturn = calculate_liquidity_pool_output(exchange.tokenPool(), exchange.ethPool(), 2*10**18, fee)
# Act
tokenToEthTx = exchange.tokenToEth(account.address, 2*10**18, expectedReturn * 0.95, {'from': account})
# Above has 5% slippage
tokenToEthTx.wait(1)
# Assert
assert abs(tokenToEthTx.events["EthPurchase"]["ethOut"] / expectedReturn - 1) < 0.0001 # I was having some jank in the math, was not able to find it. 0.01% error is fine.
assert exchange.invariant() >= beforeInvariant
def test_call_tokenToEth_min_larger():
# Arrange
token = deploy_erc20()
account = smart_get_account(1)
exchange = AstroSwapExchange.deploy(
token.address,
fee,
{'from': account},
publish_source = config["networks"][network.show_active()].get("verify", False)
)
approveTx = token.approve(exchange.address, 100*10**18 + 2*10**18, {'from': account})
approveTx.wait(1)
seedTx = exchange.seedInvest(100*10**18, {'from': account, 'value': 1*10**18})
seedTx.wait(1)
expectedReturn = calculate_liquidity_pool_output(exchange.tokenPool(), exchange.ethPool(), 2*10**18, fee)
# Act & Assert
with pytest.raises(Exception):
tokenToEthTx = exchange.tokenToEth(account.address, 2*10**18, expectedReturn * 1.05, {'from': account})
tokenToEthTx.wait(1)
def test_call_tokenToEth_no_seed():
# Arrange
token = deploy_erc20()
account = smart_get_account(1)
exchange = AstroSwapExchange.deploy(
token.address,
fee,
{'from': account},
publish_source = config["networks"][network.show_active()].get("verify", False)
)
# Act & Assert
with pytest.raises(Exception):
tokenToEthTx = exchange.tokenToEth(account.address, 2*10**18, 0, {'from': account})
tokenToEthTx.wait(1)
def test_ethToToken_quotes():
# Arrange
token = deploy_erc20()
account = smart_get_account(1)
exchange = AstroSwapExchange.deploy(
token.address,
fee,
{'from': account},
publish_source = config["networks"][network.show_active()].get("verify", False)
)
approveTx = token.approve(exchange.address, 200*10**18, {'from': account})
approveTx.wait(1)
seedTx = exchange.seedInvest(100*10**18, {'from': account, 'value': 1*10**18})
seedTx.wait(1)
quote = exchange.getEthToTokenQuote(0.1*10**18)
# Act
ethToTokenTx = exchange.ethToToken(account.address, 0, {'from': account, 'value': 0.1*10**18})
ethToTokenTx.wait(1)
# Assert
assert abs(ethToTokenTx.events["TokenPurchase"]["tokensOut"] / quote - 1) == 0
# And again!
quote = exchange.getEthToTokenQuote(0.3*10**18)
# Act
ethToTokenTx = exchange.ethToToken(account.address, 0, {'from': account, 'value': 0.3*10**18})
ethToTokenTx.wait(1)
# Assert
assert abs(ethToTokenTx.events["TokenPurchase"]["tokensOut"] / quote - 1) == 0
# And again!
quote = exchange.getEthToTokenQuote(123456789)
# Act
ethToTokenTx = exchange.ethToToken(account.address, 0, {'from': account, 'value': 123456789})
ethToTokenTx.wait(1)
# Assert
assert abs(ethToTokenTx.events["TokenPurchase"]["tokensOut"] / quote - 1) == 0
def test_tokenToEth_quotes():
# Arrange
token = deploy_erc20()
account = smart_get_account(1)
exchange = AstroSwapExchange.deploy(
token.address,
fee,
{'from': account},
publish_source = config["networks"][network.show_active()].get("verify", False)
)
approveTx = token.approve(exchange.address, 200*10**18, {'from': account})
approveTx.wait(1)
seedTx = exchange.seedInvest(100*10**18, {'from': account, 'value': 1*10**18})
seedTx.wait(1)
quote = exchange.getTokenToEthQuote(1*10**18)
# Act
tokenToEthTx = exchange.tokenToEth(account.address, 1*10**18, 0, {'from': account})
tokenToEthTx.wait(1)
# Assert
assert abs(tokenToEthTx.events["EthPurchase"]["ethOut"] / quote - 1) == 0
# And again!
quote = exchange.getTokenToEthQuote(6*10**18)
# Act
tokenToEthTx = exchange.tokenToEth(account.address, 6*10**18, 0, {'from': account})
tokenToEthTx.wait(1)
# Assert
assert abs(tokenToEthTx.events["EthPurchase"]["ethOut"] / quote - 1) == 0
# And again!
quote = exchange.getTokenToEthQuote(1234567)
# Act
tokenToEthTx = exchange.tokenToEth(account.address, 1234567, 0, {'from': account})
tokenToEthTx.wait(1)
# Assert
assert abs(tokenToEthTx.events["EthPurchase"]["ethOut"] / quote - 1) == 0
def test_divest_proprely():
# Arrange
token = deploy_erc20()
account = smart_get_account(1)
exchange = AstroSwapExchange.deploy(
token.address,
fee,
{'from': account},
publish_source = config["networks"][network.show_active()].get("verify", False)
)
approveTx = token.approve(exchange.address, 100*10**18, {'from': account})
approveTx.wait(1)
print("Seeding")
seedTx = exchange.seedInvest(100*10**18, {'from': account, 'value': 1*10**18})
seedTx.wait(1)
print("Divesting")
# Act
divestTx = exchange.divest(10000, {'from': account})
divestTx.wait(1)
# Assert
assert exchange.invariant() == 0
assert exchange.tokenPool() == 0
assert exchange.ethPool() == 0
assert exchange.invariant() == 0
assert exchange.getShares(account.address) == 0
def test_divest_half():
# Arrange
token = deploy_erc20()
account = smart_get_account(1)
exchange = AstroSwapExchange.deploy(
token.address,
fee,
{'from': account},
publish_source = config["networks"][network.show_active()].get("verify", False)
)
approveTx = token.approve(exchange.address, 100*10**18, {'from': account})
approveTx.wait(1)
print("Seeding")
seedTx = exchange.seedInvest(100*10**18, {'from': account, 'value': 1*10**18})
seedTx.wait(1)
print("Divesting")
# Act
divestTx = exchange.divest(10000*0.5, {'from': account})
divestTx.wait(1)
# Assert
assert exchange.invariant() == 1*10**18*0.5 * 100*10**18*0.5
assert exchange.tokenPool() == 50*10**18
assert exchange.ethPool() == 0.5*10**18
assert exchange.getShares(account.address) == 10000 - 10000*0.5
def test_divest_more_than_owned():
# Arrange
token = deploy_erc20()
account = smart_get_account(1)
exchange = AstroSwapExchange.deploy(
token.address,
fee,
{'from': account},
publish_source = config["networks"][network.show_active()].get("verify", False)
)
approveTx = token.approve(exchange.address, 100*10**18, {'from': account})
approveTx.wait(1)
print("Seeding")
seedTx = exchange.seedInvest(100*10**18, {'from': account, 'value': 1*10**18})
seedTx.wait(1)
print("Divesting")
# Act & Assert
with pytest.raises(Exception):
divestTx = exchange.divest(10000*2, {'from': account})
divestTx.wait(1)
def test_call_tokenToToken():
# Arrange
token1 = deploy_erc20()
token2 = deploy_erc20()
account = smart_get_account(1)
factory = AstroSwapFactory.deploy(
fee,
{'from': account},
publish_source = config["networks"][network.show_active()].get("verify", False)
)
# Create the two exchanges
exchange1, exchange2 = setupExchange(factory, token1, token2, account)
# Seed the two exchanges
approve1Tx = token1.approve(exchange1.address, 100*10**18, {'from': account})
approve1Tx.wait(1)
approve2Tx = token2.approve(exchange2.address, 100*10**18, {'from': account})
approve2Tx.wait(1)
seed1Tx = exchange1.seedInvest(100*10**18, {'from': account, 'value': 1*10**18})
seed1Tx.wait(1)
seed2Tx = exchange2.seedInvest(100*10**18, {'from': account, 'value': 1*10**18})
seed2Tx.wait(1)
# Act
approve1Tx = token1.approve(exchange1.address, 0.1*10**18, {'from': account})
approve1Tx.wait(1)
tokenToTokenTx = exchange1.tokenToToken(account.address, token2.address, 0.1*10**18, 0, {'from': account})
tokenToTokenTx.wait(1)
# Assert
assert tokenToTokenTx.events["TokenToTokenOut"]["tokenExchangeAddress"] == exchange2.address
assert tokenToTokenTx.events["TokenToTokenOut"]["recipient"] == account.address
assert tokenToTokenTx.events["TokenToTokenOut"]["user"] == account.address
assert tokenToTokenTx.events["TokenToTokenOut"]["ethTransfer"] > 0
assert tokenToTokenTx.events["TokenPurchase"]["user"] == exchange1.address
assert tokenToTokenTx.events["TokenPurchase"]["ethIn"] == tokenToTokenTx.events["TokenToTokenOut"]["ethTransfer"]
assert tokenToTokenTx.events["TokenPurchase"]["tokensOut"] > 0
def test_call_tokenToToken_min_bigger():
# Arrange
token1 = deploy_erc20()
token2 = deploy_erc20()
account = smart_get_account(1)
factory = AstroSwapFactory.deploy(
fee,
{'from': account},
publish_source = config["networks"][network.show_active()].get("verify", False)
)
# Create the two exchanges
exchange1, exchange2 = setupExchange(factory, token1, token2, account)
# Seed the two exchanges
approve1Tx = token1.approve(exchange1.address, 100*10**18, {'from': account})
approve1Tx.wait(1)
approve2Tx = token2.approve(exchange2.address, 100*10**18, {'from': account})
approve2Tx.wait(1)
seed1Tx = exchange1.seedInvest(100*10**18, {'from': account, 'value': 1*10**18})
seed1Tx.wait(1)
seed2Tx = exchange2.seedInvest(100*10**18, {'from': account, 'value': 1*10**18})
seed2Tx.wait(1)
# Act & Assert
with pytest.raises(Exception):
approve1Tx = token1.approve(exchange1.address, 0.1*10**18, {'from': account})
approve1Tx.wait(1)
tokenToTokenTx = exchange1.tokenToToken(account.address, token2.address, 0.1*10**18, 100*10**18, {'from': account})
tokenToTokenTx.wait(1)
def test_call_tokenToToken_seeded_only_from():
# Arrange
token1 = deploy_erc20()
token2 = deploy_erc20()
account = smart_get_account(1)
factory = AstroSwapFactory.deploy(
fee,
{'from': account},
publish_source = config["networks"][network.show_active()].get("verify", False)
)
# Create the two exchanges
exchange1, exchange2 = setupExchange(factory, token1, token2, account)
# Seed only the 'from' exchange (exchange1); exchange2 stays unseeded
approve1Tx = token1.approve(exchange1.address, 100*10**18, {'from': account})
approve1Tx.wait(1)
seed1Tx = exchange1.seedInvest(100*10**18, {'from': account, 'value': 1*10**18})
seed1Tx.wait(1)
# Act & Assert
with pytest.raises(Exception):
approve1Tx = token1.approve(exchange1.address, 0.1*10**18, {'from': account})
approve1Tx.wait(1)
tokenToTokenTx = exchange1.tokenToToken(account.address, token2.address, 0.1*10**18, 0, {'from': account})
tokenToTokenTx.wait(1)
def test_call_tokenToToken_seeded_only_to():
# Arrange
token1 = deploy_erc20()
token2 = deploy_erc20()
account = smart_get_account(1)
factory = AstroSwapFactory.deploy(
fee,
{'from': account},
publish_source = config["networks"][network.show_active()].get("verify", False)
)
# Create the two exchanges
exchange1, exchange2 = setupExchange(factory, token1, token2, account)
# Seed only the 'to' exchange (exchange2); exchange1 stays unseeded
approve2Tx = token2.approve(exchange2.address, 100*10**18, {'from': account})
approve2Tx.wait(1)
seed2Tx = exchange2.seedInvest(100*10**18, {'from': account, 'value': 1*10**18})
seed2Tx.wait(1)
# Act & Assert
with pytest.raises(Exception):
approve2Tx = token2.approve(exchange2.address, 0.1*10**18, {'from': account})
approve2Tx.wait(1)
tokenToTokenTx = exchange2.tokenToToken(account.address, token1.address, 0.1*10**18, 0, {'from': account})
tokenToTokenTx.wait(1)
def test_tokenToToken_quotes():
# Arrange
token1 = deploy_erc20()
token2 = deploy_erc20()
account = smart_get_account(1)
factory = AstroSwapFactory.deploy(
fee,
{'from': account},
publish_source = config["networks"][network.show_active()].get("verify", False)
)
# Create the two exchanges
exchange1, exchange2 = setupExchange(factory, token1, token2, account)
# Seed the two exchanges
approve1Tx = token1.approve(exchange1.address, 100*10**18, {'from': account})
approve1Tx.wait(1)
seed1Tx = exchange1.seedInvest(100*10**18, {'from': account, 'value': 1*10**18})
seed1Tx.wait(1)
approve2Tx = token2.approve(exchange2.address, 100*10**18, {'from': account})
approve2Tx.wait(1)
seed2Tx = exchange2.seedInvest(100*10**18, {'from': account, 'value': 1*10**18})
seed2Tx.wait(1)
# Act
tokenOutQuote = exchange1.getTokenToTokenQuote(0.1*10**18, token2.address, {'from': account})
approve1Tx = token1.approve(exchange1.address, 0.1*10**18, {'from': account})
approve1Tx.wait(1)
tokenToTokenTx = exchange1.tokenToToken(account.address, token2.address, 0.1*10**18, 0, {'from': account})
tokenToTokenTx.wait(1)
# Assert
assert tokenToTokenTx.events["TokenPurchase"]["tokensOut"] == tokenOutQuote
# And again!
tokenOutQuote2 = exchange2.getTokenToTokenQuote(11*10**18, token1.address, {'from': account})
approve2Tx = token2.approve(exchange2.address, 11*10**18, {'from': account})
approve2Tx.wait(1)
tokenToTokenTx2 = exchange2.tokenToToken(account.address, token1.address, 11*10**18, 0, {'from': account})
tokenToTokenTx2.wait(1)
# Assert
assert tokenToTokenTx2.events["TokenPurchase"]["tokensOut"] == tokenOutQuote2
# And again!
tokenOutQuote3 = exchange1.getTokenToTokenQuote(0.12345*10**18, token2.address, {'from': account})
approve1Tx = token1.approve(exchange1.address, 0.12345*10**18, {'from': account})
approve1Tx.wait(1)
tokenToTokenTx3 = exchange1.tokenToToken(account.address, token2.address, 0.12345*10**18, 0, {'from': account})
tokenToTokenTx3.wait(1)
# Assert
assert tokenToTokenTx3.events["TokenPurchase"]["tokensOut"] == tokenOutQuote3
# And again! (Random this time ;)
randomTokens = randint(0, 20*10**18)
tokenOutQuote4 = exchange1.getTokenToTokenQuote(randomTokens, token2.address, {'from': account})
approve1Tx = token1.approve(exchange1.address, randomTokens, {'from': account})
approve1Tx.wait(1)
tokenToTokenTx4 = exchange1.tokenToToken(account.address, token2.address, randomTokens, 0, {'from': account})
tokenToTokenTx4.wait(1)
# Assert
assert tokenToTokenTx4.events["TokenPurchase"]["tokensOut"] == tokenOutQuote4
def test_investQuoteFromEth():
# Arrange
token = deploy_erc20()
account = smart_get_account(1)
exchange = AstroSwapExchange.deploy(
token.address,
fee,
{'from': account},
publish_source = config["networks"][network.show_active()].get("verify", False)
)
approveTx = token.approve(exchange.address, 1000*10**18, {'from': account})
approveTx.wait(1)
seedTx = exchange.seedInvest(100*10**18, {'from': account, 'value': 1*10**18})
seedTx.wait(1)
# Act
quote = exchange.investQuoteFromEth(1*10**18)
investTx = exchange.invest(10**22, {'from': account, 'value': 1*10**18})
investTx.wait(1)
# Assert
assert investTx.events["Investment"]["tokensInvested"] == quote
# And again!
quote2 = exchange.investQuoteFromEth(0.23*10**18)
investTx2 = exchange.invest(10**22, {'from': account, 'value': 0.23*10**18})
investTx2.wait(1)
# Assert
assert investTx2.events["Investment"]["tokensInvested"] == quote2
# And again!
quote3 = exchange.investQuoteFromEth(0.12345*10**18)
investTx3 = exchange.invest(10**22, {'from': account, 'value': 0.12345*10**18})
investTx3.wait(1)
# Assert
assert investTx3.events["Investment"]["tokensInvested"] == quote3
# And again! (Zero)
quote4 = exchange.investQuoteFromEth(0)
investTx4 = exchange.invest(10**22, {'from': account, 'value': 0})
investTx4.wait(1)
# Assert
assert investTx4.events["Investment"]["tokensInvested"] == quote4
def test_investQuoteFromToken():
# Arrange
token = deploy_erc20()
account = smart_get_account(1)
exchange = AstroSwapExchange.deploy(
token.address,
fee,
{'from': account},
publish_source = config["networks"][network.show_active()].get("verify", False)
)
approveTx = token.approve(exchange.address, 1000*10**18, {'from': account})
approveTx.wait(1)
seedTx = exchange.seedInvest(100*10**18, {'from': account, 'value': 1*10**18})
seedTx.wait(1)
# Act
quote = exchange.investQuoteFromTokens(6*10**18)
print(quote)
investTx = exchange.invest(10**22, {'from': account, 'value': quote})
investTx.wait(1)
# Assert
assert investTx.events["Investment"]["tokensInvested"] == 6*10**18
# And again!
quote2 = exchange.investQuoteFromTokens(0.23*10**18)
investTx2 = exchange.invest(10**22, {'from': account, 'value': quote2})
investTx2.wait(1)
# Assert
assert investTx2.events["Investment"]["tokensInvested"] == 0.23*10**18
# And again!
quote3 = exchange.investQuoteFromTokens(0.12345*10**18)
investTx3 = exchange.invest(10**22, {'from': account, 'value': quote3})
investTx3.wait(1)
# Assert
assert investTx3.events["Investment"]["tokensInvested"] == 0.12345*10**18
# And again! (Zero)
quote4 = exchange.investQuoteFromTokens(0)
investTx4 = exchange.invest(10**22, {'from': account, 'value': quote4})
investTx4.wait(1)
# Assert
assert investTx4.events["Investment"]["tokensInvested"] == 0
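# --- Hedged reference sketch (not taken from the AstroSwapExchange contract) ---
# The quote tests above only assert that on-chain quotes match the realised swap
# amounts. Assuming a Uniswap-V1-style constant-product pool with the fee taken
# from the input side, the expected output can be sketched as below;
# fee_numerator/fee_denominator are illustrative stand-ins for however the
# contract encodes its `fee` parameter.
def constant_product_output(amount_in, input_pool, output_pool,
                            fee_numerator=997, fee_denominator=1000):
    amount_in_with_fee = amount_in * fee_numerator
    # x * y = k  =>  out = out_pool * in_with_fee / (in_pool * denom + in_with_fee)
    return (amount_in_with_fee * output_pool) // (input_pool * fee_denominator + amount_in_with_fee)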
| 39.393089
| 179
| 0.661632
| 4,226
| 36,478
| 5.631803
| 0.061051
| 0.030756
| 0.026555
| 0.04979
| 0.844916
| 0.80105
| 0.772521
| 0.754538
| 0.748193
| 0.711849
| 0
| 0.06538
| 0.193267
| 36,478
| 926
| 180
| 39.393089
| 0.743374
| 0.094468
| 0
| 0.722461
| 0
| 0
| 0.073711
| 0.001278
| 0
| 0
| 0.001278
| 0
| 0.105866
| 1
| 0.051502
| false
| 0
| 0.007153
| 0
| 0.060086
| 0.014306
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e63bd12e80850b731f42225110c2f6ff1de50def
| 5,873
|
gyp
|
Python
|
third_party/cld/cld.gyp
|
zachlatta/chromium
|
c4625eefca763df86471d798ee5a4a054b4716ae
|
[
"BSD-3-Clause"
] | 1
|
2021-09-24T22:49:10.000Z
|
2021-09-24T22:49:10.000Z
|
third_party/cld/cld.gyp
|
changbai1980/chromium
|
c4625eefca763df86471d798ee5a4a054b4716ae
|
[
"BSD-3-Clause"
] | null | null | null |
third_party/cld/cld.gyp
|
changbai1980/chromium
|
c4625eefca763df86471d798ee5a4a054b4716ae
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'includes': [
'../../build/common.gypi',
],
'conditions': [
['OS=="win"', {
'targets': [
{
'target_name': 'cld',
'type': '<(library)',
'dependencies': [
'../../base/base.gyp:base',
],
'msvs_disabled_warnings': [4005, 4006, 4018, 4244, 4309, 4800],
'defines': [
'CLD_WINDOWS',
],
'sources': [
'bar/common/scopedlibrary.h',
'bar/common/scopedptr.h',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/cldutil.cc',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/cldutil.h',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/cldutil_dbg.h',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/cldutil_dbg_empty.cc',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/compact_lang_det.cc',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/compact_lang_det.h',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/compact_lang_det_generated_cjkbis_0.cc',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/compact_lang_det_generated_ctjkvz.cc',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/compact_lang_det_generated_longwords8_0.cc',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/compact_lang_det_generated_meanscore.h',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/compact_lang_det_generated_quads_128.cc',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/compact_lang_det_impl.cc',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/compact_lang_det_impl.h',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/ext_lang_enc.cc',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/ext_lang_enc.h',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/getonescriptspan.cc',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/getonescriptspan.h',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/letterscript_enum.cc',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/letterscript_enum.h',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/subsetsequence.cc',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/subsetsequence.h',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/tote.cc',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/tote.h',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/unittest_data.h',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/utf8propjustletter.h',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/utf8propletterscriptnum.h',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/utf8scannotjustletterspecial.h',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/win/cld_basictypes.h',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/win/cld_commandlineflags.h',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/win/cld_macros.h',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/win/cld_google.h',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/win/cld_htmlutils.h',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/win/cld_htmlutils_windows.cc',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/win/cld_logging.h',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/win/cld_scoped_ptr.h',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/win/cld_scopedptr.h',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/win/cld_strtoint.h',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/win/cld_unicodetext.cc',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/win/cld_unicodetext.h',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/win/cld_unilib.h',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/win/cld_unilib_windows.cc',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/win/cld_utf.h',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/win/cld_utf8statetable.cc',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/win/cld_utf8statetable.h',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/win/cld_utf8utils.h',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/win/cld_utf8utils_windows.cc',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/win/normalizedunicodetext.cc',
'bar/toolbar/cld/i18n/encodings/compact_lang_det/win/normalizedunicodetext.h',
'bar/toolbar/cld/i18n/encodings/internal/encodings.cc',
'bar/toolbar/cld/i18n/encodings/proto/encodings.pb.h',
'bar/toolbar/cld/i18n/encodings/public/encodings.h',
'bar/toolbar/cld/i18n/languages/internal/languages.cc',
'bar/toolbar/cld/i18n/languages/proto/languages.pb.h',
'bar/toolbar/cld/i18n/languages/public/languages.h',
'base/casts.h',
'base/commandlineflags.h',
'base/stl_decl.h',
'base/global_strip_options.h',
'base/logging.h',
'base/macros.h',
'base/crash.h',
'base/dynamic_annotations.h',
'base/scoped_ptr.h',
'base/stl_decl_msvc.h',
'base/log_severity.h',
'base/strtoint.h',
'base/vlog_is_on.h',
'base/type_traits.h',
'base/template_util.h',
],
'direct_dependent_settings': {
'defines': [
'CLD_WINDOWS',
'COMPILER_MSVC',
],
},
},],
},
],
],
}
| 55.40566
| 105
| 0.656223
| 750
| 5,873
| 4.896
| 0.174667
| 0.170752
| 0.21732
| 0.25
| 0.759532
| 0.751906
| 0.71378
| 0.71378
| 0.71378
| 0.56781
| 0
| 0.032133
| 0.210455
| 5,873
| 105
| 106
| 55.933333
| 0.759758
| 0.027073
| 0
| 0.108911
| 0
| 0
| 0.729422
| 0.67303
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
e653ce8ccaf0760fc330c4172704848a0ec0a3e8
| 21,668
|
py
|
Python
|
sdk/python/pulumi_alicloud/rocketmq/dnat_entry.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 42
|
2019-03-18T06:34:37.000Z
|
2022-03-24T07:08:57.000Z
|
sdk/python/pulumi_alicloud/rocketmq/dnat_entry.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 152
|
2019-04-15T21:03:44.000Z
|
2022-03-29T18:00:57.000Z
|
sdk/python/pulumi_alicloud/rocketmq/dnat_entry.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 3
|
2020-08-26T17:30:07.000Z
|
2021-07-05T01:37:45.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['DnatEntryArgs', 'DnatEntry']
@pulumi.input_type
class DnatEntryArgs:
def __init__(__self__, *,
external_port: pulumi.Input[str],
internal_ip: pulumi.Input[str],
internal_port: pulumi.Input[str],
ip_protocol: pulumi.Input[str],
sag_id: pulumi.Input[str],
type: pulumi.Input[str],
external_ip: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a DnatEntry resource.
:param pulumi.Input[str] external_port: The public port. Value range: 1 to 65535 or "any".
:param pulumi.Input[str] internal_ip: The destination private IP address.
:param pulumi.Input[str] internal_port: The destination private port. Value range: 1 to 65535 or "any".
:param pulumi.Input[str] ip_protocol: The protocol type. Valid values: TCP: Forwards packets of the TCP protocol. UDP: Forwards packets of the UDP protocol. Any: Forwards packets of all protocols.
:param pulumi.Input[str] sag_id: The ID of the SAG instance.
:param pulumi.Input[str] type: The DNAT type. Valid values: Intranet: DNAT of private IP addresses. Internet: DNAT of public IP addresses
:param pulumi.Input[str] external_ip: The external public IP address. When "type" is "Internet", the external IP is identified automatically.
"""
pulumi.set(__self__, "external_port", external_port)
pulumi.set(__self__, "internal_ip", internal_ip)
pulumi.set(__self__, "internal_port", internal_port)
pulumi.set(__self__, "ip_protocol", ip_protocol)
pulumi.set(__self__, "sag_id", sag_id)
pulumi.set(__self__, "type", type)
if external_ip is not None:
pulumi.set(__self__, "external_ip", external_ip)
@property
@pulumi.getter(name="externalPort")
def external_port(self) -> pulumi.Input[str]:
"""
The public port. Value range: 1 to 65535 or "any".
"""
return pulumi.get(self, "external_port")
@external_port.setter
def external_port(self, value: pulumi.Input[str]):
pulumi.set(self, "external_port", value)
@property
@pulumi.getter(name="internalIp")
def internal_ip(self) -> pulumi.Input[str]:
"""
The destination private IP address.
"""
return pulumi.get(self, "internal_ip")
@internal_ip.setter
def internal_ip(self, value: pulumi.Input[str]):
pulumi.set(self, "internal_ip", value)
@property
@pulumi.getter(name="internalPort")
def internal_port(self) -> pulumi.Input[str]:
"""
The destination private port. Value range: 1 to 65535 or "any".
"""
return pulumi.get(self, "internal_port")
@internal_port.setter
def internal_port(self, value: pulumi.Input[str]):
pulumi.set(self, "internal_port", value)
@property
@pulumi.getter(name="ipProtocol")
def ip_protocol(self) -> pulumi.Input[str]:
"""
The protocol type. Valid values: TCP: Forwards packets of the TCP protocol. UDP: Forwards packets of the UDP protocol. Any: Forwards packets of all protocols.
"""
return pulumi.get(self, "ip_protocol")
@ip_protocol.setter
def ip_protocol(self, value: pulumi.Input[str]):
pulumi.set(self, "ip_protocol", value)
@property
@pulumi.getter(name="sagId")
def sag_id(self) -> pulumi.Input[str]:
"""
The ID of the SAG instance.
"""
return pulumi.get(self, "sag_id")
@sag_id.setter
def sag_id(self, value: pulumi.Input[str]):
pulumi.set(self, "sag_id", value)
@property
@pulumi.getter
def type(self) -> pulumi.Input[str]:
"""
The DNAT type. Valid values: Intranet: DNAT of private IP addresses. Internet: DNAT of public IP addresses
"""
return pulumi.get(self, "type")
@type.setter
def type(self, value: pulumi.Input[str]):
pulumi.set(self, "type", value)
@property
@pulumi.getter(name="externalIp")
def external_ip(self) -> Optional[pulumi.Input[str]]:
"""
The external public IP address. When "type" is "Internet", the external IP is identified automatically.
"""
return pulumi.get(self, "external_ip")
@external_ip.setter
def external_ip(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "external_ip", value)
@pulumi.input_type
class _DnatEntryState:
def __init__(__self__, *,
external_ip: Optional[pulumi.Input[str]] = None,
external_port: Optional[pulumi.Input[str]] = None,
internal_ip: Optional[pulumi.Input[str]] = None,
internal_port: Optional[pulumi.Input[str]] = None,
ip_protocol: Optional[pulumi.Input[str]] = None,
sag_id: Optional[pulumi.Input[str]] = None,
type: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering DnatEntry resources.
:param pulumi.Input[str] external_ip: The external public IP address. When "type" is "Internet", the external IP is identified automatically.
:param pulumi.Input[str] external_port: The public port. Value range: 1 to 65535 or "any".
:param pulumi.Input[str] internal_ip: The destination private IP address.
:param pulumi.Input[str] internal_port: The destination private port. Value range: 1 to 65535 or "any".
:param pulumi.Input[str] ip_protocol: The protocol type. Valid values: TCP: Forwards packets of the TCP protocol. UDP: Forwards packets of the UDP protocol. Any: Forwards packets of all protocols.
:param pulumi.Input[str] sag_id: The ID of the SAG instance.
:param pulumi.Input[str] type: The DNAT type. Valid values: Intranet: DNAT of private IP addresses. Internet: DNAT of public IP addresses
"""
if external_ip is not None:
pulumi.set(__self__, "external_ip", external_ip)
if external_port is not None:
pulumi.set(__self__, "external_port", external_port)
if internal_ip is not None:
pulumi.set(__self__, "internal_ip", internal_ip)
if internal_port is not None:
pulumi.set(__self__, "internal_port", internal_port)
if ip_protocol is not None:
pulumi.set(__self__, "ip_protocol", ip_protocol)
if sag_id is not None:
pulumi.set(__self__, "sag_id", sag_id)
if type is not None:
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="externalIp")
def external_ip(self) -> Optional[pulumi.Input[str]]:
"""
The external public IP address. When "type" is "Internet", the external IP is identified automatically.
"""
return pulumi.get(self, "external_ip")
@external_ip.setter
def external_ip(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "external_ip", value)
@property
@pulumi.getter(name="externalPort")
def external_port(self) -> Optional[pulumi.Input[str]]:
"""
The public port. Value range: 1 to 65535 or "any".
"""
return pulumi.get(self, "external_port")
@external_port.setter
def external_port(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "external_port", value)
@property
@pulumi.getter(name="internalIp")
def internal_ip(self) -> Optional[pulumi.Input[str]]:
"""
The destination private IP address.
"""
return pulumi.get(self, "internal_ip")
@internal_ip.setter
def internal_ip(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "internal_ip", value)
@property
@pulumi.getter(name="internalPort")
def internal_port(self) -> Optional[pulumi.Input[str]]:
"""
The destination private port. Value range: 1 to 65535 or "any".
"""
return pulumi.get(self, "internal_port")
@internal_port.setter
def internal_port(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "internal_port", value)
@property
@pulumi.getter(name="ipProtocol")
def ip_protocol(self) -> Optional[pulumi.Input[str]]:
"""
The protocol type. Valid values: TCP: Forwards packets of the TCP protocol. UDP: Forwards packets of the UDP protocol. Any: Forwards packets of all protocols.
"""
return pulumi.get(self, "ip_protocol")
@ip_protocol.setter
def ip_protocol(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "ip_protocol", value)
@property
@pulumi.getter(name="sagId")
def sag_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the SAG instance.
"""
return pulumi.get(self, "sag_id")
@sag_id.setter
def sag_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "sag_id", value)
@property
@pulumi.getter
def type(self) -> Optional[pulumi.Input[str]]:
"""
The DNAT type. Valid values: Intranet: DNAT of private IP addresses. Internet: DNAT of public IP addresses
"""
return pulumi.get(self, "type")
@type.setter
def type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "type", value)
class DnatEntry(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
external_ip: Optional[pulumi.Input[str]] = None,
external_port: Optional[pulumi.Input[str]] = None,
internal_ip: Optional[pulumi.Input[str]] = None,
internal_port: Optional[pulumi.Input[str]] = None,
ip_protocol: Optional[pulumi.Input[str]] = None,
sag_id: Optional[pulumi.Input[str]] = None,
type: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Provides a Sag DnatEntry resource. This topic describes how to add a DNAT entry to a Smart Access Gateway (SAG) instance to enable the DNAT function. By using the DNAT function, you can forward requests received by public IP addresses to Alibaba Cloud instances according to custom mapping rules.
For information about Sag DnatEntry and how to use it, see [What is Sag DnatEntry](https://www.alibabacloud.com/help/doc-detail/124312.htm).
> **NOTE:** Available in 1.63.0+
> **NOTE:** Only the following regions are supported: [`cn-shanghai`, `cn-shanghai-finance-1`, `cn-hongkong`, `ap-southeast-1`, `ap-southeast-2`, `ap-southeast-3`, `ap-southeast-5`, `ap-northeast-1`, `eu-central-1`]
## Example Usage
Basic Usage
```python
import pulumi
import pulumi_alicloud as alicloud
default = alicloud.rocketmq.DnatEntry("default",
external_ip="1.0.0.2",
external_port="1",
internal_ip="10.0.0.2",
internal_port="20",
ip_protocol="tcp",
sag_id="sag-3rb1t3iagy3w0zgwy9",
type="Intranet")
```
## Import
The Sag DnatEntry can be imported using the id, e.g.
```sh
$ pulumi import alicloud:rocketmq/dnatEntry:DnatEntry example sag-abc123456:dnat-abc123456
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] external_ip: The external public IP address. When "type" is "Internet", the external IP is identified automatically.
:param pulumi.Input[str] external_port: The public port. Value range: 1 to 65535 or "any".
:param pulumi.Input[str] internal_ip: The destination private IP address.
:param pulumi.Input[str] internal_port: The destination private port. Value range: 1 to 65535 or "any".
:param pulumi.Input[str] ip_protocol: The protocol type. Valid values: TCP: Forwards packets of the TCP protocol. UDP: Forwards packets of the UDP protocol. Any: Forwards packets of all protocols.
:param pulumi.Input[str] sag_id: The ID of the SAG instance.
:param pulumi.Input[str] type: The DNAT type. Valid values: Intranet: DNAT of private IP addresses. Internet: DNAT of public IP addresses
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: DnatEntryArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Provides a Sag DnatEntry resource. This topic describes how to add a DNAT entry to a Smart Access Gateway (SAG) instance to enable the DNAT function. By using the DNAT function, you can forward requests received by public IP addresses to Alibaba Cloud instances according to custom mapping rules.
For information about Sag DnatEntry and how to use it, see [What is Sag DnatEntry](https://www.alibabacloud.com/help/doc-detail/124312.htm).
> **NOTE:** Available in 1.63.0+
> **NOTE:** Only the following regions are supported: [`cn-shanghai`, `cn-shanghai-finance-1`, `cn-hongkong`, `ap-southeast-1`, `ap-southeast-2`, `ap-southeast-3`, `ap-southeast-5`, `ap-northeast-1`, `eu-central-1`]
## Example Usage
Basic Usage
```python
import pulumi
import pulumi_alicloud as alicloud
default = alicloud.rocketmq.DnatEntry("default",
external_ip="1.0.0.2",
external_port="1",
internal_ip="10.0.0.2",
internal_port="20",
ip_protocol="tcp",
sag_id="sag-3rb1t3iagy3w0zgwy9",
type="Intranet")
```
## Import
The Sag DnatEntry can be imported using the id, e.g.
```sh
$ pulumi import alicloud:rocketmq/dnatEntry:DnatEntry example sag-abc123456:dnat-abc123456
```
:param str resource_name: The name of the resource.
:param DnatEntryArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(DnatEntryArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
external_ip: Optional[pulumi.Input[str]] = None,
external_port: Optional[pulumi.Input[str]] = None,
internal_ip: Optional[pulumi.Input[str]] = None,
internal_port: Optional[pulumi.Input[str]] = None,
ip_protocol: Optional[pulumi.Input[str]] = None,
sag_id: Optional[pulumi.Input[str]] = None,
type: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = DnatEntryArgs.__new__(DnatEntryArgs)
__props__.__dict__["external_ip"] = external_ip
if external_port is None and not opts.urn:
raise TypeError("Missing required property 'external_port'")
__props__.__dict__["external_port"] = external_port
if internal_ip is None and not opts.urn:
raise TypeError("Missing required property 'internal_ip'")
__props__.__dict__["internal_ip"] = internal_ip
if internal_port is None and not opts.urn:
raise TypeError("Missing required property 'internal_port'")
__props__.__dict__["internal_port"] = internal_port
if ip_protocol is None and not opts.urn:
raise TypeError("Missing required property 'ip_protocol'")
__props__.__dict__["ip_protocol"] = ip_protocol
if sag_id is None and not opts.urn:
raise TypeError("Missing required property 'sag_id'")
__props__.__dict__["sag_id"] = sag_id
if type is None and not opts.urn:
raise TypeError("Missing required property 'type'")
__props__.__dict__["type"] = type
super(DnatEntry, __self__).__init__(
'alicloud:rocketmq/dnatEntry:DnatEntry',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
external_ip: Optional[pulumi.Input[str]] = None,
external_port: Optional[pulumi.Input[str]] = None,
internal_ip: Optional[pulumi.Input[str]] = None,
internal_port: Optional[pulumi.Input[str]] = None,
ip_protocol: Optional[pulumi.Input[str]] = None,
sag_id: Optional[pulumi.Input[str]] = None,
type: Optional[pulumi.Input[str]] = None) -> 'DnatEntry':
"""
Get an existing DnatEntry resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] external_ip: The external public IP address. When "type" is "Internet", the external IP is identified automatically.
:param pulumi.Input[str] external_port: The public port. Value range: 1 to 65535 or "any".
:param pulumi.Input[str] internal_ip: The destination private IP address.
:param pulumi.Input[str] internal_port: The destination private port. Value range: 1 to 65535 or "any".
:param pulumi.Input[str] ip_protocol: The protocol type. Valid values: TCP: Forwards packets of the TCP protocol. UDP: Forwards packets of the UDP protocol. Any: Forwards packets of all protocols.
:param pulumi.Input[str] sag_id: The ID of the SAG instance.
:param pulumi.Input[str] type: The DNAT type. Valid values: Intranet: DNAT of private IP addresses. Internet: DNAT of public IP addresses
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _DnatEntryState.__new__(_DnatEntryState)
__props__.__dict__["external_ip"] = external_ip
__props__.__dict__["external_port"] = external_port
__props__.__dict__["internal_ip"] = internal_ip
__props__.__dict__["internal_port"] = internal_port
__props__.__dict__["ip_protocol"] = ip_protocol
__props__.__dict__["sag_id"] = sag_id
__props__.__dict__["type"] = type
return DnatEntry(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="externalIp")
def external_ip(self) -> pulumi.Output[Optional[str]]:
"""
The external public IP address. When "type" is "Internet", the external IP is identified automatically.
"""
return pulumi.get(self, "external_ip")
@property
@pulumi.getter(name="externalPort")
def external_port(self) -> pulumi.Output[str]:
"""
The public port. Value range: 1 to 65535 or "any".
"""
return pulumi.get(self, "external_port")
@property
@pulumi.getter(name="internalIp")
def internal_ip(self) -> pulumi.Output[str]:
"""
The destination private IP address.
"""
return pulumi.get(self, "internal_ip")
@property
@pulumi.getter(name="internalPort")
def internal_port(self) -> pulumi.Output[str]:
"""
The destination private port. Value range: 1 to 65535 or "any".
"""
return pulumi.get(self, "internal_port")
@property
@pulumi.getter(name="ipProtocol")
def ip_protocol(self) -> pulumi.Output[str]:
"""
The protocol type. Valid values: TCP: Forwards packets of the TCP protocol. UDP: Forwards packets of the UDP protocol. Any: Forwards packets of all protocols.
"""
return pulumi.get(self, "ip_protocol")
@property
@pulumi.getter(name="sagId")
def sag_id(self) -> pulumi.Output[str]:
"""
The ID of the SAG instance.
"""
return pulumi.get(self, "sag_id")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
The DNAT type. Valid values: Intranet: DNAT of private IP addresses. Internet: DNAT of public IP addresses
"""
return pulumi.get(self, "type")
| 43.336
| 304
| 0.638638
| 2,690
| 21,668
| 4.951673
| 0.0829
| 0.078453
| 0.097748
| 0.074324
| 0.876502
| 0.854354
| 0.831982
| 0.797823
| 0.765691
| 0.753904
| 0
| 0.011084
| 0.254707
| 21,668
| 499
| 305
| 43.422846
| 0.813735
| 0.375438
| 0
| 0.635338
| 1
| 0
| 0.101685
| 0.003012
| 0
| 0
| 0
| 0
| 0
| 1
| 0.157895
| false
| 0.003759
| 0.018797
| 0
| 0.270677
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
052e4c47c6e2dcdfb761c327311252c09a2a0b74
| 54
|
py
|
Python
|
nri/nri/models/__init__.py
|
take-cheeze/models
|
3ded8fd062c57f20f6154cac2dd0d998181de755
|
[
"MIT"
] | 112
|
2018-04-18T07:13:03.000Z
|
2022-03-11T03:36:34.000Z
|
nri/nri/models/__init__.py
|
take-cheeze/models
|
3ded8fd062c57f20f6154cac2dd0d998181de755
|
[
"MIT"
] | 16
|
2018-05-11T11:41:08.000Z
|
2021-04-24T03:50:54.000Z
|
nri/nri/models/__init__.py
|
take-cheeze/models
|
3ded8fd062c57f20f6154cac2dd0d998181de755
|
[
"MIT"
] | 45
|
2018-04-18T07:13:06.000Z
|
2021-12-22T03:46:18.000Z
|
from nri.models import cnn
from nri.models import mlp
| 18
| 26
| 0.814815
| 10
| 54
| 4.4
| 0.6
| 0.318182
| 0.590909
| 0.863636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 54
| 2
| 27
| 27
| 0.956522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
0577982e959baa7ba68bfe5d0a4dfaed4ae8cd88
| 119
|
py
|
Python
|
util/__init__.py
|
eyalbetzalel/flowplusplus
|
e6650aafcb8bc41e386996cf2fa15608a9f90c92
|
[
"MIT"
] | 33
|
2019-02-19T08:28:09.000Z
|
2022-03-29T03:55:14.000Z
|
util/__init__.py
|
eyalbetzalel/flowplusplus
|
e6650aafcb8bc41e386996cf2fa15608a9f90c92
|
[
"MIT"
] | 5
|
2019-10-01T10:12:41.000Z
|
2021-07-16T04:45:58.000Z
|
util/__init__.py
|
eyalbetzalel/flowplusplus
|
e6650aafcb8bc41e386996cf2fa15608a9f90c92
|
[
"MIT"
] | 11
|
2019-02-19T08:32:14.000Z
|
2021-09-15T21:13:32.000Z
|
from util.array_util import *
from util.norm_util import *
from util.optim_util import *
from util.shell_util import *
| 23.8
| 29
| 0.798319
| 20
| 119
| 4.55
| 0.35
| 0.351648
| 0.461538
| 0.593407
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.134454
| 119
| 4
| 30
| 29.75
| 0.883495
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
0588821bf1e8a92d50a85216bc57b4b9a0b6097a
| 181
|
py
|
Python
|
{{ cookiecutter.project_name }}/{{ cookiecutter.package_slug }}/__init__.py
|
mysketches/data-science-blueprint
|
385d6ae8a21e32e99a453bf4e10ee006c007ca38
|
[
"MIT"
] | 11
|
2020-05-09T04:58:22.000Z
|
2021-09-18T04:42:34.000Z
|
{{ cookiecutter.project_name }}/{{ cookiecutter.package_slug }}/__init__.py
|
mysketches/data-science-blueprint
|
385d6ae8a21e32e99a453bf4e10ee006c007ca38
|
[
"MIT"
] | null | null | null |
{{ cookiecutter.project_name }}/{{ cookiecutter.package_slug }}/__init__.py
|
mysketches/data-science-blueprint
|
385d6ae8a21e32e99a453bf4e10ee006c007ca38
|
[
"MIT"
] | 2
|
2020-05-08T15:48:17.000Z
|
2021-12-28T14:41:57.000Z
|
from .operator.dataframe import tolower
from .operator.dataframe import toupper
from .operator.generator import generate
__all__ = [
'tolower',
'toupper',
'generate'
]
| 18.1
| 40
| 0.729282
| 19
| 181
| 6.736842
| 0.473684
| 0.28125
| 0.328125
| 0.421875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.176796
| 181
| 9
| 41
| 20.111111
| 0.85906
| 0
| 0
| 0
| 1
| 0
| 0.121547
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.375
| 0
| 0.375
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
e9ae27cc4ab755e401923cac5fb864725a7cc753
| 45
|
py
|
Python
|
QtSpreadSheet/__init__.py
|
kevinfol/QtSpreadSheet
|
08d57025bab7f0addf7ea37843b0ed7eb3682a54
|
[
"MIT"
] | null | null | null |
QtSpreadSheet/__init__.py
|
kevinfol/QtSpreadSheet
|
08d57025bab7f0addf7ea37843b0ed7eb3682a54
|
[
"MIT"
] | null | null | null |
QtSpreadSheet/__init__.py
|
kevinfol/QtSpreadSheet
|
08d57025bab7f0addf7ea37843b0ed7eb3682a54
|
[
"MIT"
] | null | null | null |
from .QtSpreadSheet import QSpreadSheetWidget
| 45
| 45
| 0.911111
| 4
| 45
| 10.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066667
| 45
| 1
| 45
| 45
| 0.97619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
75694e69cd7faed406cfcb4b208fcdc8e7662476
| 284
|
py
|
Python
|
layoutlmft/layoutlmft/models/layoutxlm/__init__.py
|
Maria-philna/unilm
|
5550a335c6d2ae5838b1a90e50cb46f81edcd50f
|
[
"MIT"
] | 5,129
|
2019-09-30T11:21:03.000Z
|
2022-03-31T22:35:12.000Z
|
layoutlmft/layoutlmft/models/layoutxlm/__init__.py
|
Maria-philna/unilm
|
5550a335c6d2ae5838b1a90e50cb46f81edcd50f
|
[
"MIT"
] | 604
|
2019-10-05T00:39:46.000Z
|
2022-03-31T11:12:07.000Z
|
layoutlmft/layoutlmft/models/layoutxlm/__init__.py
|
Maria-philna/unilm
|
5550a335c6d2ae5838b1a90e50cb46f81edcd50f
|
[
"MIT"
] | 1,034
|
2019-09-30T15:01:32.000Z
|
2022-03-31T06:14:50.000Z
|
from .configuration_layoutxlm import LayoutXLMConfig
from .modeling_layoutxlm import LayoutXLMForRelationExtraction, LayoutXLMForTokenClassification, LayoutXLMModel
from .tokenization_layoutxlm import LayoutXLMTokenizer
from .tokenization_layoutxlm_fast import LayoutXLMTokenizerFast
| 56.8
| 111
| 0.915493
| 23
| 284
| 11.086957
| 0.565217
| 0.176471
| 0.196078
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.06338
| 284
| 4
| 112
| 71
| 0.958647
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
757c74795ac2894193b8a614067085ae5a124a22
| 122
|
py
|
Python
|
models/__init__.py
|
AdrianCurtin/BRITS
|
82c8b137fc2364f512259d6c858bcae7dfd8d286
|
[
"MIT"
] | null | null | null |
models/__init__.py
|
AdrianCurtin/BRITS
|
82c8b137fc2364f512259d6c858bcae7dfd8d286
|
[
"MIT"
] | null | null | null |
models/__init__.py
|
AdrianCurtin/BRITS
|
82c8b137fc2364f512259d6c858bcae7dfd8d286
|
[
"MIT"
] | null | null | null |
from . import rits_i
from . import brits_i
from . import rits
from . import brits
from . import gru_d
from . import m_rnn
| 17.428571
| 21
| 0.754098
| 22
| 122
| 4
| 0.409091
| 0.681818
| 0.318182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.196721
| 122
| 6
| 22
| 20.333333
| 0.897959
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
75a4478b19e17acf033df64e696a14640ee4a75e
| 1,099
|
py
|
Python
|
tests/selecting/test_skip_while.py
|
sixty-north/les_iterables
|
755cdde4dd9e3d56ce2f424ff0619bde7064e897
|
[
"MIT"
] | 2
|
2020-07-04T22:21:10.000Z
|
2021-12-10T14:38:18.000Z
|
tests/selecting/test_skip_while.py
|
sixty-north/les_iterables
|
755cdde4dd9e3d56ce2f424ff0619bde7064e897
|
[
"MIT"
] | 1
|
2020-05-26T12:01:50.000Z
|
2020-05-26T12:01:50.000Z
|
tests/selecting/test_skip_while.py
|
sixty-north/les_iterables
|
755cdde4dd9e3d56ce2f424ff0619bde7064e897
|
[
"MIT"
] | null | null | null |
from les_iterables.selecting import skip_while
def test_skip_while_empty():
actual = list(skip_while([], lambda x: x%2 == 0))
assert actual == []
def test_skip_while_leaving_empty():
actual = list(skip_while([4], lambda x: x%2 == 0))
assert actual == []
def test_skip_while_none_to_skip_yielding_one():
actual = list(skip_while([3], lambda x: x%2 == 0))
assert actual == [3]
def test_skip_while_one_to_skip_yielding_one():
actual = list(skip_while([2, 5], lambda x: x%2 == 0))
assert actual == [5]
def test_skip_while_none_to_skip_yielding_two():
actual = list(skip_while([3, 7], lambda x: x%2 == 0))
assert actual == [3, 7]
def test_skip_while_one_to_skip_yielding_two():
actual = list(skip_while([2, 5, 9], lambda x: x%2 == 0))
assert actual == [5, 9]
def test_skip_while_two_to_skip_yielding_two():
actual = list(skip_while([2, 4, 5, 9], lambda x: x%2 == 0))
assert actual == [5, 9]
def test_skip_while_later_matches_yielded():
actual = list(skip_while([2, 4, 5, 9, 6], lambda x: x%2 == 0))
assert actual == [5, 9, 6]
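# Hedged reference: the real skip_while lives in les_iterables.selecting and is
# not part of this dump. A minimal generator consistent with every assertion
# above is given here under a different name so the imported function is not
# shadowed.
def _reference_skip_while(iterable, predicate):
    iterator = iter(iterable)
    for item in iterator:
        if not predicate(item):
            yield item              # first non-matching item ends the skipping
            break
    yield from iterator             # everything afterwards is yielded as-is
def test_reference_matches_expected_behaviour():
    assert list(_reference_skip_while([2, 4, 5, 9, 6], lambda x: x % 2 == 0)) == [5, 9, 6]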
| 27.475
| 66
| 0.655141
| 187
| 1,099
| 3.545455
| 0.171123
| 0.230769
| 0.13273
| 0.193062
| 0.864253
| 0.788839
| 0.785822
| 0.785822
| 0.38914
| 0.241327
| 0
| 0.050619
| 0.191083
| 1,099
| 40
| 67
| 27.475
| 0.695163
| 0
| 0
| 0.16
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.32
| 1
| 0.32
| false
| 0
| 0.04
| 0
| 0.36
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f98f0adef984f4b61d1aa014483dbc558876d78a
| 1,587
|
py
|
Python
|
panda/apps/shipping/methods.py
|
siyecao22162/Panda
|
d6e6ab9dd2d735422879cc48cac79a4b3731d679
|
[
"BSD-3-Clause"
] | null | null | null |
panda/apps/shipping/methods.py
|
siyecao22162/Panda
|
d6e6ab9dd2d735422879cc48cac79a4b3731d679
|
[
"BSD-3-Clause"
] | 1
|
2017-07-28T19:35:07.000Z
|
2017-07-28T19:35:07.000Z
|
panda/apps/shipping/methods.py
|
siyecao22162/Panda
|
d6e6ab9dd2d735422879cc48cac79a4b3731d679
|
[
"BSD-3-Clause"
] | null | null | null |
#-*- coding: utf-8 -*-
from oscar.apps.shipping import methods
from decimal import Decimal as D
from oscar.core import prices
class StandardLunch(methods.Base):
code = 'standard lunch'
name = 'A MIDI DANS 3 JOURS'
charge_per_order = D('3.00')
threshold = D('14.99')
description = "Livraison à midi dans 3 jours; Téléphone: +33 07 84 92 06 66"
def calculate(self, basket):
# Free for orders over some threshold
if basket.total_incl_tax > self.threshold:
return prices.Price(
currency=basket.currency,
excl_tax=D('0.00'),
incl_tax=D('0.00'))
# Otherwise apply the flat per-order charge (charge_per_order)
total = self.charge_per_order
return prices.Price(
currency=basket.currency,
excl_tax=total,
incl_tax=total)
class StandardDinner(methods.Base):
code = 'standard dinner'
name = 'A SOIR DANS 3 JOURS'
charge_per_order = D('3.00')
threshold = D('14.99')
description = "Livraison à soir dans 3 jours; Téléphone: +33 07 84 92 06 66"
def calculate(self, basket):
# Free for orders over some threshold
if basket.total_incl_tax > self.threshold:
return prices.Price(
currency=basket.currency,
excl_tax=D('0.00'),
incl_tax=D('0.00'))
# Otherwise apply the flat per-order charge (charge_per_order)
total = self.charge_per_order
return prices.Price(
currency=basket.currency,
excl_tax=total,
incl_tax=total)
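# Illustrative restatement (not part of the Oscar API): both methods above share
# the same rule, so the effective charge can be sanity-checked in isolation with
# plain Decimals; D is re-imported here only so the sketch stands alone.
from decimal import Decimal as D
def _effective_charge(total_incl_tax, threshold=D('14.99'), charge_per_order=D('3.00')):
    # Free above the threshold, otherwise a flat per-order charge.
    return D('0.00') if total_incl_tax > threshold else charge_per_order
assert _effective_charge(D('20.00')) == D('0.00')
assert _effective_charge(D('10.00')) == D('3.00')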
| 27.842105
| 80
| 0.595463
| 208
| 1,587
| 4.447115
| 0.326923
| 0.045405
| 0.043243
| 0.108108
| 0.752432
| 0.752432
| 0.752432
| 0.752432
| 0.752432
| 0.752432
| 0
| 0.055606
| 0.308759
| 1,587
| 56
| 81
| 28.339286
| 0.787603
| 0.10586
| 0
| 0.702703
| 0
| 0
| 0.156294
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.054054
| false
| 0
| 0.081081
| 0
| 0.567568
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
ddb4845c614755583819a0ab94e7ce5e4324949e
| 4,226
|
py
|
Python
|
refinery/bnpy/bnpy-dev/tests/ioutil/TestModelReader.py
|
csa0001/Refinery
|
0d5de8fc3d680a2c79bd0e9384b506229787c74f
|
[
"MIT"
] | 103
|
2015-01-13T00:48:14.000Z
|
2021-11-08T10:53:22.000Z
|
refinery/bnpy/bnpy-dev/tests/ioutil/TestModelReader.py
|
csa0001/Refinery
|
0d5de8fc3d680a2c79bd0e9384b506229787c74f
|
[
"MIT"
] | 7
|
2015-02-21T04:03:40.000Z
|
2021-08-23T20:24:54.000Z
|
refinery/bnpy/bnpy-dev/tests/ioutil/TestModelReader.py
|
csa0001/Refinery
|
0d5de8fc3d680a2c79bd0e9384b506229787c74f
|
[
"MIT"
] | 27
|
2015-01-23T00:54:31.000Z
|
2020-12-30T14:30:50.000Z
|
'''
Unit tests for ModelReader.py
'''
from bnpy.ioutil import ModelWriter, ModelReader
from bnpy import HModel
from bnpy.data import XData
import numpy as np
import unittest
class TestModelReaderEMK1(unittest.TestCase):
def shortDescription(self):
return None
def setUp(self):
PRNG = np.random.RandomState(867)
X = PRNG.randn(100,2)
self.Data = XData(X=X)
aPDict = dict(alpha0=1.0)
oPDict = dict(min_covar=1e-9)
self.hmodel = HModel.CreateEntireModel('EM','MixModel','ZMGauss', aPDict, oPDict, self.Data)
initParams = dict(initname='randexamples', seed=0, K=1)
self.hmodel.init_global_params(self.Data, **initParams)
def test_save_then_load_same_model(self, prefix='Test'):
''' Verify that we can save model H to disk, load it back in as G, and
get same results from calculations on H,G with same inputs
'''
ModelWriter.save_model(self.hmodel, '/tmp/', prefix)
gmodel = ModelReader.load_model('/tmp/', prefix)
assert type(gmodel.allocModel) == type(self.hmodel.allocModel)
assert gmodel.allocModel.K == self.hmodel.allocModel.K
K = gmodel.allocModel.K
for k in range(K):
gSig = gmodel.obsModel.comp[k].ECovMat()
hSig = self.hmodel.obsModel.comp[k].ECovMat()
print(gSig, hSig)
assert np.allclose(gSig, hSig)
# Make sure we get same responsibilities before and after
hLP = self.hmodel.calc_local_params(self.Data)
gLP = gmodel.calc_local_params(self.Data)
assert np.allclose(hLP['resp'], gLP['resp'])
class TestModelReaderEMMixZM(unittest.TestCase):
def shortDescription(self):
return None
def setUp(self):
PRNG = np.random.RandomState(867)
X = PRNG.randn(100,2)
self.Data = XData(X=X)
aPDict = dict(alpha0=1.0)
oPDict = dict(min_covar=1e-9)
self.hmodel = HModel.CreateEntireModel('EM','MixModel','ZMGauss', aPDict, oPDict, self.Data)
initParams = dict(initname='randexamples', seed=0, K=5)
self.hmodel.init_global_params(self.Data, **initParams)
def test_save_then_load_same_model(self, prefix='Test'):
''' Verify that we can save model H to disk, load it back in as G, and
get same results from calculations on H,G with same inputs
'''
ModelWriter.save_model(self.hmodel, '/tmp/', prefix)
gmodel = ModelReader.load_model('/tmp/', prefix)
assert type(gmodel.allocModel) == type(self.hmodel.allocModel)
assert gmodel.allocModel.K == self.hmodel.allocModel.K
K = gmodel.allocModel.K
for k in range(K):
gSig = gmodel.obsModel.comp[k].ECovMat()
hSig = self.hmodel.obsModel.comp[k].ECovMat()
assert np.allclose(gSig, hSig)
# Make sure we get same responsibilities before and after
hLP = self.hmodel.calc_local_params(self.Data)
gLP = gmodel.calc_local_params(self.Data)
assert np.allclose(hLP['resp'], gLP['resp'])
class TestModelReaderVBMixZM(unittest.TestCase):
def shortDescription(self):
return None
def setUp(self):
PRNG = np.random.RandomState(867)
X = PRNG.randn(100,2)
self.Data = XData(X=X)
aPDict = dict(alpha0=1.0)
oPDict = dict(dF=4.0, ECovMat='eye', sF=1.0)
self.hmodel = HModel.CreateEntireModel('VB','MixModel','ZMGauss', aPDict, oPDict, self.Data)
initParams = dict(initname='randexamples', seed=0, K=5)
self.hmodel.init_global_params(self.Data, **initParams)
def test_save_then_load_same_model(self, prefix='Test'):
''' Verify that we can save model H to disk, load it back in as G, and
get same results from calculations on H,G with same inputs
'''
ModelWriter.save_model(self.hmodel, '/tmp/', prefix)
gmodel = ModelReader.load_model('/tmp/', prefix)
assert type(gmodel.allocModel) == type(self.hmodel.allocModel)
assert gmodel.allocModel.K == self.hmodel.allocModel.K
K = gmodel.allocModel.K
for k in range(K):
gSig = gmodel.obsModel.comp[k].ECovMat()
hSig = self.hmodel.obsModel.comp[k].ECovMat()
assert np.allclose(gSig, hSig)
# Make sure we get same responsibilities before and after
hLP = self.hmodel.calc_local_params(self.Data)
gLP = gmodel.calc_local_params(self.Data)
assert np.allclose(hLP['resp'], gLP['resp'])
| 36.119658
| 96
| 0.690487
| 600
| 4,226
| 4.795
| 0.186667
| 0.072993
| 0.043796
| 0.04171
| 0.904762
| 0.904762
| 0.904762
| 0.904762
| 0.904762
| 0.904762
| 0
| 0.013036
| 0.183152
| 4,226
| 116
| 97
| 36.431034
| 0.820394
| 0.039517
| 0
| 0.851852
| 0
| 0
| 0.043588
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 0
| null | null | 0
| 0.061728
| null | null | 0.012346
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
dde71dbcc5cdeadf231e776b7f443e380e619387
| 342
|
py
|
Python
|
tests/conftest.py
|
scottbelden/avrobase
|
4ee00d083a0a477b8406cdce9f86c4f9e7ad3346
|
[
"Apache-2.0"
] | 1
|
2016-08-28T08:33:01.000Z
|
2016-08-28T08:33:01.000Z
|
tests/conftest.py
|
scottbelden/avrobase
|
4ee00d083a0a477b8406cdce9f86c4f9e7ad3346
|
[
"Apache-2.0"
] | null | null | null |
tests/conftest.py
|
scottbelden/avrobase
|
4ee00d083a0a477b8406cdce9f86c4f9e7ad3346
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from avrobase.record import create_record_class
from primitive_schemas import all_schemas as all_primitive_schemas
@pytest.fixture
def BooleanClass():
return create_record_class(all_primitive_schemas['boolean_schema'])
@pytest.fixture
def IntClass():
return create_record_class(all_primitive_schemas['int_schema'])
| 22.8
| 71
| 0.830409
| 45
| 342
| 5.955556
| 0.422222
| 0.238806
| 0.190299
| 0.171642
| 0.313433
| 0.313433
| 0.313433
| 0
| 0
| 0
| 0
| 0
| 0.102339
| 342
| 14
| 72
| 24.428571
| 0.872964
| 0
| 0
| 0.222222
| 0
| 0
| 0.070175
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| true
| 0
| 0.333333
| 0.222222
| 0.777778
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
fb22bfd899b9d29e7fc253d983db7ec1fdf4e807
| 2,296
|
py
|
Python
|
tests/testsamples/joint.py
|
Anthony102899/Lego-ImageGenerator
|
52b19c8bb20f77a3394675e7c037c943a50c1e15
|
[
"Unlicense"
] | 1
|
2022-03-20T10:23:38.000Z
|
2022-03-20T10:23:38.000Z
|
tests/testsamples/joint.py
|
Anthony102899/Lego-ImageGenerator
|
52b19c8bb20f77a3394675e7c037c943a50c1e15
|
[
"Unlicense"
] | null | null | null |
tests/testsamples/joint.py
|
Anthony102899/Lego-ImageGenerator
|
52b19c8bb20f77a3394675e7c037c943a50c1e15
|
[
"Unlicense"
] | null | null | null |
import numpy as np
from solvers.rigidity_solver.models import Model, Joint, Beam
_beams = [
Beam(np.array([
[0, 0, 0],
[0, 0, 1],
[1, 0, 0],
]) + np.array([1, 0, 0])),
Beam(np.array([
[0, 0, 0],
[0, 0, 1],
[0, 1, 0],
]) + np.array([0, 1, 0])),
]
_beams = [
Beam.tetra(
np.array([0, 0, 1]),
np.array([0, 0, 2]),
),
Beam.tetra(
np.array([0, 1, 0]),
np.array([0, 2, 0]),
),
]
_pivot = np.array([0, 0, 0])
def hinge():
model = Model()
_beams = [
Beam.tetra(
np.array([0, 0, 1]),
np.array([0, 0, 5]),
),
Beam.tetra(
np.array([0, 1, 0]),
np.array([0, 5, 0]),
),
]
_pivot = np.array([0, 0, 0])
model.add_beams(_beams)
model.add_joint(Joint(
_beams[0], _beams[1],
_pivot,
rotation_axes=np.array([0, 0, 1])
))
return model
def prismatic():
model = Model()
model.add_beams(_beams)
model.add_joint(Joint(
_beams[0], _beams[1], _pivot,
translation_vectors=np.array([0, 0, 1])
))
return model
def cylindrical():
model = Model()
model.add_beams(_beams)
model.add_joint(Joint(
_beams[0], _beams[1], _pivot,
translation_vectors=np.array([0, 0, 1]),
rotation_axes=np.array([0, 0, 1])
))
return model
def ball():
model = Model()
model.add_beams(_beams)
model.add_joint(Joint(
_beams[0], _beams[1], _pivot,
rotation_axes=np.array([
[1, 0, 0],
[0, 1, 0],
[0, 0, 1],
])
))
return model
def ball_planar():
model = Model()
model.add_beams(_beams)
model.add_joint(Joint(
_beams[0], _beams[1], _pivot,
rotation_axes=np.array([
[1, 0, 0],
[0, 1, 0],
[0, 0, 1],
]),
translation_vectors=np.array([
[1, 0, 0],
[0, 0, 1],
])
))
return model
def universal():
model = Model()
model.add_beams(_beams)
model.add_joint(Joint(
_beams[0], _beams[1], _pivot,
rotation_axes=np.array([
[1, 1, 0],
[0, 1, 0],
]),
))
return model
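# Minimal usage sketch (assumes the surrounding rigidity_solver package is
# importable): every factory above builds the same pair of tetra beams and
# differs only in how the Joint at _pivot is constrained.
if __name__ == "__main__":
    for build in (hinge, prismatic, cylindrical, ball, ball_planar, universal):
        model = build()
        print(build.__name__, "->", type(model).__name__)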
| 20.140351
| 61
| 0.461672
| 299
| 2,296
| 3.374582
| 0.110368
| 0.067393
| 0.134787
| 0.107037
| 0.836472
| 0.824579
| 0.804757
| 0.733399
| 0.719524
| 0.685828
| 0
| 0.07367
| 0.361498
| 2,296
| 113
| 62
| 20.318584
| 0.614598
| 0
| 0
| 0.772277
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.059406
| false
| 0
| 0.019802
| 0
| 0.138614
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fb3073ebe3a05912c696dd324280bbbad37f93d6
| 458
|
py
|
Python
|
base_report_to_printer/tests/__init__.py
|
Chief0-0/Localizacion_ERP_V12
|
f59e56564e29525f772b59db7fef7c7cde347336
|
[
"Apache-2.0"
] | null | null | null |
base_report_to_printer/tests/__init__.py
|
Chief0-0/Localizacion_ERP_V12
|
f59e56564e29525f772b59db7fef7c7cde347336
|
[
"Apache-2.0"
] | null | null | null |
base_report_to_printer/tests/__init__.py
|
Chief0-0/Localizacion_ERP_V12
|
f59e56564e29525f772b59db7fef7c7cde347336
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from . import test_printing_job
from . import test_printing_printer
from . import test_printing_printer_tray
from . import test_printing_server
from . import test_printing_tray
from . import test_report
from . import test_res_users
from . import test_ir_actions_report
from . import test_printing_printer_wizard
from . import test_printing_report_xml_action
| 32.714286
| 68
| 0.829694
| 71
| 458
| 5.014085
| 0.450704
| 0.280899
| 0.393258
| 0.432584
| 0.244382
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014778
| 0.113537
| 458
| 13
| 69
| 35.230769
| 0.862069
| 0.20524
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.7
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
fb322b4620c362b5cfc8574163e49e90a2547bfb
| 80,843
|
py
|
Python
|
ray_elegantrl/net.py
|
GyChou/ray_elegant_carla
|
16de9f297e24b564ec19d9cf3e9293450ac78cdd
|
[
"MIT"
] | null | null | null |
ray_elegantrl/net.py
|
GyChou/ray_elegant_carla
|
16de9f297e24b564ec19d9cf3e9293450ac78cdd
|
[
"MIT"
] | null | null | null |
ray_elegantrl/net.py
|
GyChou/ray_elegant_carla
|
16de9f297e24b564ec19d9cf3e9293450ac78cdd
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
import numpy as np
import torch.nn.functional as F
from torch.distributions import MultivariateNormal, Normal, Categorical
from scipy.stats import beta as scipy_beta
from scipy.optimize import fmin
"""
Modified from [ElegantRL](https://github.com/AI4Finance-LLC/ElegantRL)
by https://github.com/GyChou
"""
'''Q Network'''
class QNet(nn.Module): # nn.Module is a standard PyTorch Network
def __init__(self, mid_dim, state_dim, action_dim):
super().__init__()
self.net = nn.Sequential(nn.Linear(state_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, action_dim))
self.explore_rate = 1.
def forward(self, state):
return self.net(state) # Q value
class QNetDuel(nn.Module): # Dueling DQN
def __init__(self, mid_dim, state_dim, action_dim):
super().__init__()
self.net_state = nn.Sequential(nn.Linear(state_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU())
self.net_val = nn.Sequential(nn.Linear(mid_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, 1)) # Q value
self.net_adv = nn.Sequential(nn.Linear(mid_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, action_dim)) # advantage function value 1
self.explore_rate = 1.
def forward(self, state):
t_tmp = self.net_state(state)
q_val = self.net_val(t_tmp)
q_adv = self.net_adv(t_tmp)
return q_val + q_adv - q_adv.mean(dim=1, keepdim=True) # dueling Q value
class QNetTwin(nn.Module): # Double DQN
def __init__(self, mid_dim, state_dim, action_dim):
super().__init__()
self.net_state = nn.Sequential(nn.Linear(state_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU()) # state
self.net_q1 = nn.Sequential(nn.Linear(mid_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, action_dim)) # q1 value
self.net_q2 = nn.Sequential(nn.Linear(mid_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, action_dim)) # q2 value
self.softmax = torch.nn.Softmax(dim=1)
self.explore_rate = 1.
# policy
def forward(self, state):
tmp = self.net_state(state)
q = self.net_q1(tmp)
return self.softmax(q).argmax(dim=1)
def get_a_prob(self, state):
tmp = self.net_state(state)
q = self.net_q1(tmp)
return self.softmax(q)
def get_q1(self, state):
tmp = self.net_state(state)
return self.net_q1(tmp)
def get_q1_q2(self, state):
tmp = self.net_state(state)
q1 = self.net_q1(tmp)
q2 = self.net_q2(tmp)
return q1, q2 # two Q values
class QNetTwinDuel(nn.Module): # D3QN: Dueling Double DQN
def __init__(self, mid_dim, state_dim, action_dim):
super().__init__()
self.net_state = nn.Sequential(nn.Linear(state_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU())
self.net_val1 = nn.Sequential(nn.Linear(mid_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, 1)) # q1 value
self.net_val2 = nn.Sequential(nn.Linear(mid_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, 1)) # q2 value
        self.net_adv1 = nn.Sequential(nn.Linear(mid_dim, mid_dim), nn.ReLU(),
                                      nn.Linear(mid_dim, action_dim))  # advantage value 1
        self.net_adv2 = nn.Sequential(nn.Linear(mid_dim, mid_dim), nn.ReLU(),
                                      nn.Linear(mid_dim, action_dim))  # advantage value 2
self.softmax = torch.nn.Softmax(dim=1)
self.explore_rate = 1.
# policy
def forward(self, state):
t_tmp = self.net_state(state)
q_val = self.net_val1(t_tmp)
q_adv = self.net_adv1(t_tmp)
q = q_val + q_adv - q_adv.mean(dim=1, keepdim=True) # one dueling Q value
return self.softmax(q).argmax(dim=1)
def get_a_prob(self, state):
t_tmp = self.net_state(state)
q_val = self.net_val1(t_tmp)
q_adv = self.net_adv1(t_tmp)
q = q_val + q_adv - q_adv.mean(dim=1, keepdim=True) # one dueling Q value
return self.softmax(q)
def get_q1(self, state):
t_tmp = self.net_state(state)
q_val = self.net_val1(t_tmp)
q_adv = self.net_adv1(t_tmp)
return q_val + q_adv - q_adv.mean(dim=1, keepdim=True) # one dueling Q value
def get_q1_q2(self, state):
tmp = self.net_state(state)
val1 = self.net_val1(tmp)
adv1 = self.net_adv1(tmp)
q1 = val1 + adv1 - adv1.mean(dim=1, keepdim=True)
val2 = self.net_val2(tmp)
adv2 = self.net_adv2(tmp)
q2 = val2 + adv2 - adv2.mean(dim=1, keepdim=True)
return q1, q2 # two dueling Q values
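# Minimal usage sketch (hypothetical dimensions, shown as comments only): forward() returns greedy action
# indices, get_a_prob() the softmax over Q values, and get_q1_q2() both heads for the twin/dueling update.
#   net = QNetTwinDuel(mid_dim=256, state_dim=8, action_dim=4)
#   greedy_a = net(torch.rand(2, 8))            # LongTensor of shape [2]
#   q1, q2 = net.get_q1_q2(torch.rand(2, 8))    # two [2, 4] dueling Q tensors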
'''Policy Network (Actor)'''
class Actor(nn.Module): # DPG: Deterministic Policy Gradient
def __init__(self, mid_dim, state_dim, action_dim):
super().__init__()
self.net = nn.Sequential(nn.Linear(state_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, action_dim))
def forward(self, state):
return self.net(state).tanh() # action.tanh()
def get_action(self, state, action_std):
action = self.net(state).tanh()
noise = (torch.randn_like(action) * action_std).clamp(-0.5, 0.5)
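        # Exploration: clipped Gaussian noise (|noise| <= 0.5) is added to the deterministic tanh action,
        # and the noisy action is re-clipped to the valid range [-1, 1].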
return (action + noise).clamp(-1.0, 1.0)
class RNNPPO(nn.Module):
def __init__(self, mid_dim, state_dim, action_dim, hidden_state_dim, if_use_dn=False):
        super().__init__()
        self.if_store_state = True  # lstm_forward checks this flag (cf. CarlaRNNPPO); keep stored LSTM states by default
self.rnn = nn.LSTMCell(state_dim, hidden_state_dim)
self.hidden_state_dim = hidden_state_dim
self.critic_net = nn.Sequential(nn.Linear(hidden_state_dim + state_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU(),
# nn.Linear(mid_dim, mid_dim), nn.ReLU(), # nn.Hardswish(),
nn.Linear(mid_dim, 1), )
self.actor_net = nn.Sequential(nn.Linear(hidden_state_dim + state_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU(),
# nn.Linear(mid_dim, mid_dim), nn.Hardswish(),
nn.Linear(mid_dim, action_dim), )
self.a_std_log = nn.Parameter(torch.zeros((1, action_dim)) - 0.5, requires_grad=True) # trainable parameter
self.sqrt_2pi_log = np.log(np.sqrt(2 * np.pi))
lstm_layer_norm(self.rnn, self.hidden_state_dim)
layer_norm(self.actor_net[-1], std=0.1) # output layer for action
layer_norm(self.critic_net[-1], std=0.5) # output layer for Q value
def critic_forward(self, state, hidden_state, cell_state): # state: batch, dim
rnn_embading, _ = self.lstm_infer(state, hidden_state, cell_state)
return self.critic_net(torch.cat([state, rnn_embading], dim=1)) # V value
def actor_forward(self, state, hidden_state, cell_state): # state: batch, dim
hidden_state_next, cell_state_next = self.lstm_infer(state, hidden_state, cell_state)
return self.actor_net(torch.cat([state, hidden_state_next], dim=1)).tanh(), hidden_state_next, cell_state_next
def forward(self, state, hidden_state, cell_state, len_sequence): # state: timestep, batch, dim
rnn_embading = self.lstm_forward(state, hidden_state, cell_state, len_sequence)
state = state.permute(1, 0, 2)
state = torch.cat([batch[:len_sequence[i]] for i, batch in enumerate(state)], dim=0)
input = torch.cat([state, rnn_embading], dim=1)
return self.actor_net(input).tanh(), self.critic_net(input)
def lstm_forward(self, state, hidden_state, cell_state, len_sequence): # state: timestep, batch, dim
if not self.if_store_state:
hidden_state = torch.zeros([state.shape[1], self.hidden_state_dim]).to(device=state.device) # zero state
cell_state = torch.zeros([state.shape[1], self.hidden_state_dim]).to(device=state.device) # zero state
rnn_ouput_list = []
for i in range(len(state)):
hidden_state, cell_state = self.rnn(state[i], (hidden_state, cell_state))
rnn_ouput_list.append(hidden_state)
tmp_output = torch.stack(rnn_ouput_list, dim=1)
rnn_embading = torch.cat([batch[:len_sequence[i]] for i, batch in enumerate(tmp_output)], dim=0)
return rnn_embading
def lstm_infer(self, state, hidden_state, cell_state): # state: batch, dim
hidden_state, cell_state = self.rnn(state, (hidden_state, cell_state))
return hidden_state, cell_state
def get_action(self, state, hidden_state, cell_state): # state: batch, dim
hidden_state_next, cell_state_next = self.lstm_infer(state, hidden_state, cell_state)
input = torch.cat([state, hidden_state_next], dim=1)
pi = self.get_distribution(input)
return pi.sample().tanh(), hidden_state_next, cell_state_next
def get_distribution(self, state):
a_avg = self.actor_net(state)
a_std = self.a_std_log.clamp(-20, 2).exp()
return Normal(loc=a_avg, scale=a_std)
def compute_logprob_infer(self, state, action, hidden_state, cell_state):
action = action.atanh()
rnn_embading, _ = self.lstm_infer(state, hidden_state, cell_state)
input = torch.cat([state, rnn_embading], dim=1)
pi = self.get_distribution(input)
return pi.log_prob(action).sum(dim=1)
def compute_logprob(self, state, action, hidden_state, cell_state, len_sequence):
action = action.atanh()
rnn_embading = self.lstm_forward(state, hidden_state, cell_state, len_sequence)
state = state.permute(1, 0, 2)
state = torch.cat([batch[:len_sequence[i]] for i, batch in enumerate(state)], dim=0)
input = torch.cat([state, rnn_embading], dim=1)
pi = self.get_distribution(input)
delta = ((pi.loc - action) / pi.scale).pow(2).__mul__(0.5) # __mul__(0.5) is * 0.5
log_prob1 = -(self.a_std_log.clamp(-20, 2) + self.sqrt_2pi_log + delta).sum(1)
return log_prob1
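# The recurrent actor-critics in this file take variable-length (padded) sequences of shape [timestep, batch, dim].
# After rolling the LSTMCell over time, torch.stack(..., dim=1) yields [batch, timestep, hidden], and the
# list comprehension over len_sequence drops each episode's padded tail before the actor/critic heads.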
class RNNEmbedding(nn.Module):
def __init__(self, hidden_state_dim, state_dim, embedding_dim=64, if_store_state=True):
super().__init__()
self.ego_state = 12
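        # Only the first self.ego_state observation features (presumably the ego-vehicle part of the CARLA
        # state) are fed to the LSTM; the full state is concatenated with the hidden state afterwards.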
self.state_dim = state_dim
self.if_store_state = if_store_state
self.rnn = nn.LSTMCell(self.ego_state, hidden_state_dim)
self.hidden_state_dim = hidden_state_dim
        self.embedding = nn.Linear(self.state_dim + self.hidden_state_dim, embedding_dim)  # embeds concat([state, hidden_state])
lstm_layer_norm(self.rnn, self.hidden_state_dim)
def forward(self, state, hidden_state, cell_state, len_sequence): # state: timestep, batch, dim
rnn_input = state[:, :, :self.ego_state]
rnn_embading = self.sequence_infer(rnn_input, hidden_state, cell_state, len_sequence)
state = state.permute(1, 0, 2)
state = torch.cat([batch[:len_sequence[i]] for i, batch in enumerate(state)], dim=0)
return self.embedding(torch.cat([state, rnn_embading], dim=1))
def sequence_infer(self, state, hidden_state, cell_state, len_sequence): # state: timestep, batch, dim
if not self.if_store_state:
hidden_state = torch.zeros([state.shape[1], self.hidden_state_dim]).to(device=state.device) # zero state
cell_state = torch.zeros([state.shape[1], self.hidden_state_dim]).to(device=state.device) # zero state
rnn_ouput_list = []
for i in range(len(state)):
hidden_state, cell_state = self.rnn(state[i], (hidden_state, cell_state))
rnn_ouput_list.append(hidden_state)
        tmp_output = torch.stack(rnn_ouput_list, dim=1)  # [batch, timestep, hidden]; dim=1 so the slicing below indexes time per episode
output = torch.cat([batch[:len_sequence[i]] for i, batch in enumerate(tmp_output)], dim=0)
return output
def single_infer(self, state, hidden_state, cell_state): # state: batch, dim
input = state[:, :self.ego_state]
hidden_state, cell_state = self.rnn(input, (hidden_state, cell_state))
return hidden_state, cell_state
def embedding_infer(self, state, hidden_state, cell_state):
next_hidden_state, next_cell_state = self.single_infer(state, hidden_state, cell_state)
embedding = self.embedding(torch.cat([state, next_hidden_state], dim=1))
return embedding, next_hidden_state, next_cell_state
class CarlaRNNPPO(nn.Module):
def __init__(self, mid_dim, state_dim, action_dim, hidden_state_dim, if_store_state=True, if_use_dn=False):
super().__init__()
self.ego_state = 12
self.state_dim = state_dim
self.if_store_state = if_store_state
self.rnn = nn.LSTMCell(self.ego_state, hidden_state_dim)
self.hidden_state_dim = hidden_state_dim
self.critic_net = nn.Sequential(nn.Linear(hidden_state_dim + state_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU(),
# nn.Linear(mid_dim, mid_dim), nn.ReLU(), # nn.Hardswish(),
nn.Linear(mid_dim, 1), )
self.actor_net = nn.Sequential(nn.Linear(hidden_state_dim + state_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU(),
# nn.Linear(mid_dim, mid_dim), nn.Hardswish(),
nn.Linear(mid_dim, action_dim), )
self.a_std_log = nn.Parameter(torch.zeros((1, action_dim)) - 0.5, requires_grad=True) # trainable parameter
self.sqrt_2pi_log = np.log(np.sqrt(2 * np.pi))
lstm_layer_norm(self.rnn, self.hidden_state_dim)
layer_norm(self.actor_net[-1], std=0.1) # output layer for action
layer_norm(self.critic_net[-1], std=0.5) # output layer for Q value
def critic_forward(self, state, hidden_state, cell_state): # state: batch, dim
rnn_input = state[:, :self.ego_state]
rnn_embading, _ = self.lstm_infer(rnn_input, hidden_state, cell_state)
return self.critic_net(torch.cat([state, rnn_embading], dim=1)) # V value
def actor_forward(self, state, hidden_state, cell_state): # state: batch, dim
rnn_input = state[:, :self.ego_state]
hidden_state_next, cell_state_next = self.lstm_infer(rnn_input, hidden_state, cell_state)
return self.actor_net(
torch.cat([state, hidden_state_next], dim=1)).tanh(), hidden_state_next, cell_state_next
def forward(self, state, hidden_state, cell_state, len_sequence): # state: timestep, batch, dim
rnn_input = state[:, :, :self.ego_state]
rnn_embading = self.lstm_forward(rnn_input, hidden_state, cell_state, len_sequence)
state = state.permute(1, 0, 2)
state = torch.cat([batch[:len_sequence[i]] for i, batch in enumerate(state)], dim=0)
input = torch.cat([state, rnn_embading], dim=1)
return self.actor_net(input).tanh(), self.critic_net(input)
def lstm_forward(self, state, hidden_state, cell_state, len_sequence): # state: timestep, batch, dim
if not self.if_store_state:
hidden_state = torch.zeros([state.shape[1], self.hidden_state_dim]).to(device=state.device) # zero state
cell_state = torch.zeros([state.shape[1], self.hidden_state_dim]).to(device=state.device) # zero state
rnn_ouput_list = []
for i in range(len(state)):
hidden_state, cell_state = self.rnn(state[i], (hidden_state, cell_state))
rnn_ouput_list.append(hidden_state)
tmp_output = torch.stack(rnn_ouput_list, dim=1)
rnn_embading = torch.cat([batch[:len_sequence[i]] for i, batch in enumerate(tmp_output)], dim=0)
return rnn_embading
def lstm_infer(self, state, hidden_state, cell_state): # state: batch, dim
hidden_state, cell_state = self.rnn(state, (hidden_state, cell_state))
return hidden_state, cell_state
def get_action(self, state, hidden_state, cell_state): # state: batch, dim
rnn_input = state[:, :self.ego_state]
hidden_state_next, cell_state_next = self.lstm_infer(rnn_input, hidden_state, cell_state)
input = torch.cat([state, hidden_state_next], dim=1)
pi = self.get_distribution(input)
return pi.sample().tanh(), hidden_state_next, cell_state_next
def get_distribution(self, state):
a_avg = self.actor_net(state)
a_std = self.a_std_log.clamp(-20, 2).exp()
# if state[:, :self.state_dim].isnan().any():
# print("state")
# print(state[:, :self.state_dim])
# if state[:, self.state_dim:].isnan().any():
# print("hidden state")
# print(state[:, self.state_dim:])
# if a_avg.isnan().any() or a_std.isnan().any() or a_avg.isinf().any() or a_std.isinf().any():
# print(a_avg)
return Normal(loc=a_avg, scale=a_std)
def compute_logprob_infer(self, state, action, hidden_state, cell_state):
action = action.atanh()
rnn_input = state[:, :self.ego_state]
rnn_embading, _ = self.lstm_infer(rnn_input, hidden_state, cell_state)
input = torch.cat([state, rnn_embading], dim=1)
pi = self.get_distribution(input)
return pi.log_prob(action).sum(dim=1)
def compute_logprob(self, state, action, hidden_state, cell_state, len_sequence):
action = action.atanh()
rnn_input = state[:, :, :self.ego_state]
rnn_embading = self.lstm_forward(rnn_input, hidden_state, cell_state, len_sequence)
state = state.permute(1, 0, 2)
state = torch.cat([batch[:len_sequence[i]] for i, batch in enumerate(state)], dim=0)
input = torch.cat([state, rnn_embading], dim=1)
pi = self.get_distribution(input)
delta = ((pi.loc - action) / pi.scale).pow(2).__mul__(0.5) # __mul__(0.5) is * 0.5
log_prob1 = -(self.a_std_log.clamp(-20, 2) + self.sqrt_2pi_log + delta).sum(1)
return log_prob1
class CarlaRNNPPOMG(nn.Module):
def __init__(self, mid_dim, state_dim, action_dim, hidden_state_dim, if_store_state=True, if_use_dn=False):
super().__init__()
self.ego_state = 12
self.state_dim = state_dim
self.action_dim = action_dim
self.if_store_state = if_store_state
self.rnn = nn.LSTMCell(self.ego_state, hidden_state_dim)
self.hidden_state_dim = hidden_state_dim
self.critic_net = nn.Sequential(nn.Linear(hidden_state_dim + state_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU(),
# nn.Linear(mid_dim, mid_dim), nn.ReLU(), # nn.Hardswish(),
nn.Linear(mid_dim, 1), )
self.actor_net = nn.Sequential(nn.Linear(hidden_state_dim + state_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU(), )
self.actor_net_a_loc = nn.Linear(mid_dim, action_dim) # the average of action
self.actor_net_a_cholesky = nn.Linear(mid_dim, (action_dim * (action_dim + 1)) // 2)
self.softplus = nn.Softplus(threshold=18.)
lstm_layer_norm(self.rnn, self.hidden_state_dim)
layer_norm(self.actor_net_a_loc, std=0.1) # output layer for action
layer_norm(self.critic_net[-1], std=0.5) # output layer for Q value
def critic_forward(self, state, hidden_state, cell_state): # state: batch, dim
rnn_input = state[:, :self.ego_state]
rnn_embading, _ = self.lstm_infer(rnn_input, hidden_state, cell_state)
return self.critic_net(torch.cat([state, rnn_embading], dim=1)) # V value
def actor_forward(self, state, hidden_state, cell_state): # state: batch, dim
rnn_input = state[:, :self.ego_state]
hidden_state_next, cell_state_next = self.lstm_infer(rnn_input, hidden_state, cell_state)
tmp = self.actor_net(torch.cat([state, hidden_state_next], dim=1))
return self.actor_net_a_loc(tmp).tanh(), hidden_state_next, cell_state_next
def forward(self, state, hidden_state, cell_state, len_sequence): # state: timestep, batch, dim
rnn_input = state[:, :, :self.ego_state]
rnn_embading = self.lstm_forward(rnn_input, hidden_state, cell_state, len_sequence)
state = state.permute(1, 0, 2)
state = torch.cat([batch[:len_sequence[i]] for i, batch in enumerate(state)], dim=0)
input = torch.cat([state, rnn_embading], dim=1)
tmp = self.actor_net(input)
return self.actor_net_a_loc(tmp).tanh(), self.critic_net(input)
def lstm_forward(self, state, hidden_state, cell_state, len_sequence): # state: timestep, batch, dim
if not self.if_store_state:
hidden_state = torch.zeros([state.shape[1], self.hidden_state_dim]).to(device=state.device) # zero state
cell_state = torch.zeros([state.shape[1], self.hidden_state_dim]).to(device=state.device) # zero state
rnn_ouput_list = []
for i in range(len(state)):
hidden_state, cell_state = self.rnn(state[i], (hidden_state, cell_state))
rnn_ouput_list.append(hidden_state)
tmp_output = torch.stack(rnn_ouput_list, dim=1)
rnn_embading = torch.cat([batch[:len_sequence[i]] for i, batch in enumerate(tmp_output)], dim=0)
return rnn_embading
def lstm_infer(self, state, hidden_state, cell_state): # state: batch, dim
hidden_state, cell_state = self.rnn(state, (hidden_state, cell_state))
return hidden_state, cell_state
def get_action(self, state, hidden_state, cell_state): # state: batch, dim
rnn_input = state[:, :self.ego_state]
hidden_state_next, cell_state_next = self.lstm_infer(rnn_input, hidden_state, cell_state)
input = torch.cat([state, hidden_state_next], dim=1)
pi = self.get_distribution(input)
return pi.sample().tanh(), hidden_state_next, cell_state_next
def get_loc_cholesky(self, state):
t_tmp = self.actor_net(state)
a_loc = self.actor_net_a_loc(t_tmp) # NOTICE! it is a_loc without .tanh()
# a_cholesky_vector = self.net_a_cholesky(t_tmp).relu()
a_cholesky_vector = self.softplus(self.actor_net_a_cholesky(t_tmp))
# cholesky_diag_index = torch.arange(self.action_dim, dtype=torch.long) + 1
# cholesky_diag_index = (cholesky_diag_index * (cholesky_diag_index + 1)) // 2 - 1
# a_cholesky_vector[:, cholesky_diag_index] = self.softplus(a_cholesky_vector[:, cholesky_diag_index])
tril_indices = torch.tril_indices(row=self.action_dim, col=self.action_dim, offset=0)
a_cholesky = torch.zeros(size=(a_loc.shape[0], self.action_dim, self.action_dim), dtype=torch.float32,
device=t_tmp.device)
a_cholesky[:, tril_indices[0], tril_indices[1]] = a_cholesky_vector
return a_loc, a_cholesky
def get_distribution(self, state):
loc, cholesky = self.get_loc_cholesky(state)
return MultivariateNormal(loc=loc, scale_tril=cholesky)
def compute_logprob_infer(self, state, action, hidden_state, cell_state):
action = action.atanh()
rnn_input = state[:, :self.ego_state]
rnn_embading, _ = self.lstm_infer(rnn_input, hidden_state, cell_state)
input = torch.cat([state, rnn_embading], dim=1)
pi = self.get_distribution(input)
return pi.log_prob(action)
def compute_logprob(self, state, action, hidden_state, cell_state, len_sequence):
action = action.atanh()
rnn_input = state[:, :, :self.ego_state]
rnn_embading = self.lstm_forward(rnn_input, hidden_state, cell_state, len_sequence)
state = state.permute(1, 0, 2)
state = torch.cat([batch[:len_sequence[i]] for i, batch in enumerate(state)], dim=0)
input = torch.cat([state, rnn_embading], dim=1)
pi = self.get_distribution(input)
return pi.log_prob(action)
class CarlaRNNPPOSequence(nn.Module):
def __init__(self, mid_dim, state_dim, action_dim, hidden_state_dim, if_store_state=True, if_use_dn=False):
super().__init__()
self.ego_state = 12
self.state_dim = state_dim
self.if_store_state = if_store_state
self.rnn = nn.LSTMCell(self.ego_state, hidden_state_dim)
self.hidden_state_dim = hidden_state_dim
self.critic_net = nn.Sequential(nn.Linear(hidden_state_dim + state_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU(),
# nn.Linear(mid_dim, mid_dim), nn.ReLU(), # nn.Hardswish(),
nn.Linear(mid_dim, 1), )
self.actor_net = nn.Sequential(nn.Linear(hidden_state_dim + state_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU(),
# nn.Linear(mid_dim, mid_dim), nn.Hardswish(),
nn.Linear(mid_dim, action_dim), )
self.a_std_log = nn.Parameter(torch.zeros((1, action_dim)) - 0.5, requires_grad=True) # trainable parameter
self.sqrt_2pi_log = np.log(np.sqrt(2 * np.pi))
lstm_layer_norm(self.rnn, self.hidden_state_dim)
layer_norm(self.actor_net[-1], std=0.1) # output layer for action
layer_norm(self.critic_net[-1], std=0.5) # output layer for Q value
def critic_forward(self, state, hidden_state, cell_state): # state: timestep, dim
rnn_input = state[:, :self.ego_state].unsqueeze(dim=0)
rnn_embading, _ = self.lstm_infer(rnn_input, hidden_state, cell_state)
return self.critic_net(torch.cat([state, rnn_embading], dim=1)) # V value
def actor_forward(self, state, hidden_state, cell_state): # state: timestep, dim
rnn_input = state[:, :self.ego_state].unsqueeze(dim=1)
hidden_state_next, cell_state_next = self.lstm_infer(rnn_input, hidden_state, cell_state)
return self.actor_net(
torch.cat([state[-1].unsqueeze(dim=0), hidden_state_next],
dim=1)).tanh(), hidden_state_next, cell_state_next
def forward(self, state, hidden_state, cell_state, len_sequence): # state: timestep, batch, dim
rnn_input = state[:, :, :self.ego_state]
rnn_embading = self.lstm_forward(rnn_input, hidden_state, cell_state, len_sequence)
state = state.permute(1, 0, 2)
state = torch.cat([batch[:len_sequence[i]] for i, batch in enumerate(state)], dim=0)
input = torch.cat([state, rnn_embading], dim=1)
return self.actor_net(input).tanh(), self.critic_net(input)
def lstm_forward(self, state, hidden_state, cell_state, len_sequence): # state: timestep, batch, dim
if not self.if_store_state:
hidden_state = torch.zeros([state.shape[1], self.hidden_state_dim]).to(device=state.device) # zero state
cell_state = torch.zeros([state.shape[1], self.hidden_state_dim]).to(device=state.device) # zero state
rnn_ouput_list = []
for i in range(len(state)):
hidden_state, cell_state = self.rnn(state[i], (hidden_state, cell_state))
rnn_ouput_list.append(hidden_state)
tmp_output = torch.stack(rnn_ouput_list, dim=1)
rnn_embading = torch.cat([batch[:len_sequence[i]] for i, batch in enumerate(tmp_output)], dim=0)
return rnn_embading
def lstm_infer(self, state, hidden_state, cell_state): # state: timestep, 1, dim
for i in range(state.shape[0]):
hidden_state, cell_state = self.rnn(state[i], (hidden_state, cell_state))
return hidden_state, cell_state
def get_action(self, state, hidden_state, cell_state): # state: timestep, dim
rnn_input = state[:, :self.ego_state].unsqueeze(dim=1) # rnn_input: timestep, 1, dim
hidden_state_next, cell_state_next = self.lstm_infer(rnn_input, hidden_state, cell_state)
input = torch.cat([state[-1].unsqueeze(dim=0), hidden_state_next], dim=1)
pi = self.get_distribution(input)
return pi.sample().tanh(), hidden_state_next, cell_state_next
def get_distribution(self, state):
a_avg = self.actor_net(state)
a_std = self.a_std_log.clamp(-20, 2).exp()
# if state[:, :self.state_dim].isnan().any():
# print("state")
# print(state[:, :self.state_dim])
# if state[:, self.state_dim:].isnan().any():
# print("hidden state")
# print(state[:, self.state_dim:])
# if a_avg.isnan().any() or a_std.isnan().any() or a_avg.isinf().any() or a_std.isinf().any():
# print(a_avg)
return Normal(loc=a_avg, scale=a_std)
def compute_logprob_infer(self, state, action, hidden_state, cell_state):
action = action.atanh()
rnn_input = state[:, :self.ego_state].unsqueeze(dim=0)
rnn_embading, _ = self.lstm_infer(rnn_input, hidden_state, cell_state)
input = torch.cat([state, rnn_embading], dim=1)
pi = self.get_distribution(input)
return pi.log_prob(action).sum(dim=1)
def compute_logprob(self, state, action, hidden_state, cell_state, len_sequence):
action = action.atanh()
rnn_input = state[:, :, :self.ego_state]
rnn_embading = self.lstm_forward(rnn_input, hidden_state, cell_state, len_sequence)
state = state.permute(1, 0, 2)
state = torch.cat([batch[:len_sequence[i]] for i, batch in enumerate(state)], dim=0)
input = torch.cat([state, rnn_embading], dim=1)
pi = self.get_distribution(input)
delta = ((pi.loc - action) / pi.scale).pow(2).__mul__(0.5) # __mul__(0.5) is * 0.5
log_prob1 = -(self.a_std_log.clamp(-20, 2) + self.sqrt_2pi_log + delta).sum(1)
return log_prob1
class ActorPPO(nn.Module):
def __init__(self, mid_dim, state_dim, action_dim, if_use_dn=False):
super().__init__()
if isinstance(state_dim, int):
if if_use_dn:
nn_dense = DenseNet(mid_dim)
inp_dim = nn_dense.inp_dim
out_dim = nn_dense.out_dim
self.net = nn.Sequential(nn.Linear(state_dim, inp_dim), nn.ReLU(),
nn_dense,
nn.Linear(out_dim, action_dim), )
else:
self.net = nn.Sequential(nn.Linear(state_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU(),
# nn.Linear(mid_dim, mid_dim), nn.Hardswish(),
nn.Linear(mid_dim, action_dim), )
else:
def set_dim(i):
return int(12 * 1.5 ** i)
self.net = nn.Sequential(NnReshape(*state_dim), # -> [batch_size, 4, 96, 96]
nn.Conv2d(state_dim[0], set_dim(0), 4, 2, bias=True), nn.LeakyReLU(),
nn.Conv2d(set_dim(0), set_dim(1), 3, 2, bias=False), nn.ReLU(),
nn.Conv2d(set_dim(1), set_dim(2), 3, 2, bias=False), nn.ReLU(),
nn.Conv2d(set_dim(2), set_dim(3), 3, 2, bias=True), nn.ReLU(),
nn.Conv2d(set_dim(3), set_dim(4), 3, 1, bias=True), nn.ReLU(),
nn.Conv2d(set_dim(4), set_dim(5), 3, 1, bias=True), nn.ReLU(),
NnReshape(-1),
nn.Linear(set_dim(5), mid_dim), nn.ReLU(),
nn.Linear(mid_dim, action_dim), )
self.a_std_log = nn.Parameter(torch.zeros((1, action_dim)) - 0.5, requires_grad=True) # trainable parameter
self.sqrt_2pi_log = np.log(np.sqrt(2 * np.pi))
layer_norm(self.net[-1], std=0.1) # output layer for action
def forward(self, state):
return self.net(state).tanh() # action
def get_action(self, state):
pi = self.get_distribution(state)
return pi.sample().tanh()
def get_distribution(self, state):
a_avg = self.net(state)
a_std = self.a_std_log.clamp(-20, 2).exp()
if state.isnan().any() or state.isinf().any():
print("state")
print(state)
if a_avg.isnan().any() or a_std.isnan().any() or a_avg.isinf().any() or a_std.isinf().any():
print(a_avg)
return Normal(loc=a_avg, scale=(a_std))
def compute_logprob(self, state, action):
action = action.atanh()
pi = self.get_distribution(state)
delta = ((pi.loc - action) / pi.scale).pow(2).__mul__(0.5) # __mul__(0.5) is * 0.5
log_prob1 = -(self.a_std_log.clamp(-20, 2) + self.sqrt_2pi_log + delta).sum(1)
# log_prob2=pi.log_prob(action).sum(dim=1)
return log_prob1
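    # compute_logprob evaluates the diagonal-Gaussian log-density analytically:
    # log N(a; mu, sigma) = -(log sigma + 0.5 * log(2 * pi) + 0.5 * ((a - mu) / sigma) ** 2), summed over
    # action dims, which matches pi.log_prob(action).sum(dim=1) in the commented line above.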
class ActorDiscretePPO(nn.Module):
def __init__(self, mid_dim, state_dim, action_dim, if_use_dn=False):
super().__init__()
self.state_dim = state_dim
self.action_dim = action_dim
self.net = nn.Sequential(nn.Linear(state_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU(), )
self.net_a_prob = nn.Linear(mid_dim, self.action_dim)
layer_norm(self.net_a_prob, std=0.1) # output layer for action
self.softmax = torch.nn.Softmax(dim=1)
def forward(self, state):
tmp = self.net(state)
d_a_prob = self.softmax(self.net_a_prob(tmp))
return d_a_prob.argmax(dim=1) # action
def get_action(self, state):
pi = self.get_distribution(state)
return pi.sample()
def get_distribution(self, state):
tmp = self.net(state)
da_prob = self.softmax(self.net_a_prob(tmp))
return Categorical(da_prob)
def compute_logprob(self, state, action):
action = action.squeeze()
pi = self.get_distribution(state)
d_log_prob = pi.log_prob(action)
return d_log_prob
class ActorSADPPO(nn.Module):
def __init__(self, mid_dim, state_dim, action_dim, if_use_dn=False):
super().__init__()
self.state_dim = state_dim
self.action_dim = action_dim
self.mid_dim = mid_dim
self.sp_a_num = nn.Parameter(torch.Tensor([3 for _ in range(self.action_dim)]), requires_grad=False)
self.net = nn.Sequential(nn.Linear(state_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU(), )
self.net_a_prob = nn.Sequential(*[nn.Linear(mid_dim, int(self.sp_a_num[i])) for i in range(self.action_dim)])
for net_a_prob in self.net_a_prob:
layer_norm(net_a_prob, std=0.1) # output layer for action
self.softmax = torch.nn.Softmax(dim=1)
def set_sp_a_num(self, sp_a_num, device='cpu'):
self.sp_a_num = nn.Parameter(torch.Tensor(sp_a_num).to(device=device), requires_grad=False)
self.net_a_prob = nn.Sequential(
*[nn.Linear(self.mid_dim, int(self.sp_a_num[i])) for i in range(self.action_dim)])
def forward(self, state):
tmp = self.net(state)
d_a_idx = [self.softmax(net_a_prob(tmp)).argmax(dim=1) for net_a_prob in self.net_a_prob]
d_a_idx = torch.cat(d_a_idx, dim=0).unsqueeze(dim=0)
return ((d_a_idx / (self.sp_a_num - 1)) * 2 - 1) # action
def get_action(self, state):
dists = self.get_distribution(state)
sample_idx = torch.cat([dist.sample() for dist in dists], dim=0).unsqueeze(dim=0)
return ((sample_idx / (self.sp_a_num - 1)) * 2 - 1)
def get_distribution(self, state):
tmp = self.net(state)
das_prob = [self.softmax(net_a_prob(tmp)) for net_a_prob in self.net_a_prob]
# da_prob = torch.cat(da_prob, dim=1)
dist = [Categorical(da_prob) for da_prob in das_prob]
return dist
def compute_logprob(self, state, action):
action = (((self.sp_a_num - 1) * (action + 1)) / 2).round().type(torch.long)
dists = self.get_distribution(state)
d_log_probs = [dist.log_prob(action[:, i]) for i, dist in enumerate(dists)]
return sum(d_log_probs)
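    # Each action dimension is discretized into sp_a_num[i] bins on [-1, 1]: indices map to actions via
    # idx / (n - 1) * 2 - 1, and compute_logprob inverts that with round() before the Categorical log-prob.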
def _discrete_to_continous(self, input):
return
def _continous_to_discrete(self, input):
return
class ActorHybridPPO(nn.Module):
def __init__(self, mid_dim, state_dim, action_dim, if_shared=False):
super().__init__()
self.state_dim = state_dim
self.c_action_dim = action_dim[0]
self.d_action_dim = action_dim[1]
self.if_shared = if_shared
if self.if_shared:
self.net = nn.Sequential(nn.Linear(state_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU(), )
self.net_ca_avg = nn.Linear(mid_dim, self.c_action_dim)
self.net_da = nn.Linear(mid_dim, self.d_action_dim)
layer_norm(self.net_ca_avg, std=0.1) # output layer for action
else:
self.net_ca_avg = nn.Sequential(nn.Linear(state_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, self.c_action_dim))
self.net_da = nn.Sequential(nn.Linear(state_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, self.d_action_dim))
layer_norm(self.net_ca_avg[-1], std=0.1) # output layer for action
self.a_std_log = nn.Parameter(torch.zeros((1, self.c_action_dim)) - 0.5,
requires_grad=True) # trainable parameter
self.sqrt_2pi_log = np.log(np.sqrt(2 * np.pi))
self.softmax = torch.nn.Softmax(dim=1)
def forward(self, state):
if self.if_shared:
tmp = self.net(state)
c_a = self.net_ca_avg(tmp).tanh()
d_a_prob = self.softmax(self.net_da(tmp))
else:
c_a = self.net_ca_avg(state).tanh()
d_a_prob = self.softmax(self.net_da(state))
da = d_a_prob.argmax(dim=1).unsqueeze(dim=1)
return torch.cat((c_a, da), 1) # action
def get_action(self, state):
pi_c, pi_d = self.get_distribution(state)
return torch.cat((pi_c.sample().tanh(), pi_d.sample().unsqueeze(dim=1)), 1)
def get_distribution(self, state):
if self.if_shared:
tmp = self.net(state)
ca_avg = self.net_ca_avg(tmp)
da_prob = self.softmax(self.net_da(tmp))
        else:
            ca_avg = self.net_ca_avg(state)
            da_prob = self.softmax(self.net_da(state))
ca_std = self.a_std_log.clamp(-20, 2).exp()
pi_c = Normal(loc=ca_avg, scale=(ca_std))
pi_d = Categorical(da_prob)
return pi_c, pi_d
def compute_logprob(self, state, action):
c_action = action[:, :-1].atanh()
d_action = action[:, -1]
pi_c, pi_d = self.get_distribution(state)
delta = ((pi_c.loc - c_action) / pi_c.scale).pow(2).__mul__(0.5) # __mul__(0.5) is * 0.5
c_log_prob = -(self.a_std_log.clamp(-20, 2) + self.sqrt_2pi_log + delta).sum(1)
d_log_prob = pi_d.log_prob(d_action)
return c_log_prob, d_log_prob
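    # Hybrid action layout: columns [:-1] hold the continuous (tanh-squashed Gaussian) part and the last
    # column holds the discrete (Categorical) index; forward/get_action concatenate them and compute_logprob splits them.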
# class ActorPPOG(nn.Module):
# def __init__(self, mid_dim, state_dim, action_dim, if_use_dn=False):
# super().__init__()
#
# self.net = nn.Sequential(nn.Linear(state_dim, mid_dim), nn.ReLU(),
# nn.Linear(mid_dim, mid_dim), nn.ReLU(),
# # nn.Linear(mid_dim, mid_dim), nn.Hardswish(),
# )
#
# self.net_a_avg = nn.Linear(mid_dim, action_dim) # the average of action
# self.net_a_logstd = nn.Linear(mid_dim, action_dim) # the log_std of action
#
# layer_norm(self.net_a_avg, std=0.1) # output layer for action
# layer_norm(self.net_a_logstd, std=0.1)
#
# def forward(self, state):
# return self.net_a_avg(self.net(state)).tanh() # action
#
# def get_action(self, state):
# pi = self.get_distribution(state)
# return pi.sample()
#
# def get_distribution(self, state):
# tmp = self.net(state)
# a_avg = self.net_a_avg(tmp)
# a_std = self.net_a_logstd(tmp).exp()
# return Normal(loc=a_avg, scale=a_std)
#
# def compute_logprob(self, state, action):
# pi = self.get_distribution(state)
# return pi.log_prob(action).sum(dim=1)
class ActorPPOBeta(nn.Module):
def __init__(self, mid_dim, state_dim, action_dim, if_use_dn=False):
super().__init__()
self.net = nn.Sequential(nn.Linear(state_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU(),
# nn.Linear(mid_dim, mid_dim), nn.Hardswish(),
)
        self.net_a_alpha = nn.Linear(mid_dim, action_dim)  # alpha parameter of the Beta distribution
        self.net_a_beta = nn.Linear(mid_dim, action_dim)  # beta parameter of the Beta distribution
self.softplus = nn.Softplus(threshold=18.)
self.beta = torch.distributions.Beta
def forward(self, state):
tmp = self.net(state)
alpha = (self.softplus(self.net_a_alpha(tmp)) + 1)
beta = (self.softplus(self.net_a_beta(tmp)) + 1)
# pi = self.beta(alpha, beta)
# return pi.mean * 2. - 1
return ((alpha - 1) / (alpha + beta - 2)) * 2. - 1.
def get_action(self, state):
pi = self.get_distribution(state)
return pi.sample() * 2. - 1.
def get_distribution(self, state):
tmp = self.net(state)
alpha = self.softplus(self.net_a_alpha(tmp)) + 1
beta = self.softplus(self.net_a_beta(tmp)) + 1
return self.beta(alpha, beta)
def get_explore(self, state):
pi = self.get_distribution(state)
return (pi.concentration0 + pi.concentration1 - 2) / 2
def compute_logprob(self, state, action):
pi = self.get_distribution(state)
return pi.log_prob((action + 1.) / 2.).sum(dim=1)
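    # The Beta(alpha, beta) distribution lives on [0, 1]; samples are rescaled to [-1, 1] with a * 2 - 1 and
    # mapped back with (a + 1) / 2 before log_prob. forward() returns the mode (alpha - 1) / (alpha + beta - 2),
    # which is well defined because softplus(.) + 1 keeps alpha, beta > 1.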
# class ActorPPOBeta2(nn.Module):
# def __init__(self, mid_dim, state_dim, action_dim, if_use_dn=False):
# super().__init__()
#
# self.net = nn.Sequential(nn.Linear(state_dim, mid_dim), nn.ReLU(),
# nn.Linear(mid_dim, mid_dim), nn.ReLU(),
# # nn.Linear(mid_dim, mid_dim), nn.Hardswish(),
# )
#
# self.net_a_alpha = nn.Linear(mid_dim, action_dim) # the average of action
# self.net_a_beta = nn.Linear(mid_dim, action_dim) # the log_std of action
# self.net_a_explore = nn.Linear(mid_dim, action_dim)
# # self.a_std_log = nn.Parameter(torch.zeros((1, action_dim)) - 0.5, requires_grad=True)
#
# self.softplus = nn.Softplus(threshold=18.)
# self.sigmoid = torch.nn.Sigmoid()
# self.softmax = torch.nn.Softmax(dim=2)
# self.beta = torch.distributions.Beta
#
# self.explore_upbound = float('inf')
# self.threshold = self.beta(self.explore_upbound / 2 + 1, 1).mean
#
# def forward(self, state):
# tmp = self.net(state)
# explore = (self.softplus(self.net_a_explore(tmp))).clamp_max(self.explore_upbound)
# explore = explore.unsqueeze(dim=2).repeat(1, 1, 2)
# alpha_beta = torch.cat((self.net_a_alpha(tmp).unsqueeze(dim=2),
# self.net_a_beta(tmp).unsqueeze(dim=2)), dim=2)
# alpha_beta = self.softmax(alpha_beta) * explore + 1
# alpha = alpha_beta[:, :, 0]
# beta = alpha_beta[:, :, 1]
#
# # prob_alpha = self.sigmoid(self.net_a_alpha(tmp))
# # prob_beta = 1 - prob_alpha
# # alpha = (prob_alpha * explore) + 1
# # beta = (prob_beta * explore) + 1
#
# # alpha = (self.softplus(self.net_a_alpha(tmp)) + 1)[0].detach().flatten()
# # beta = (self.softplus(self.net_a_beta(tmp)) + 1)[0].detach().flatten()
# # def f(x, dim):
# # return -scipy_beta(alpha[dim], beta[dim]).pdf(x)
# #
# # max_prob_a = np.zeros((alpha.shape[0],))
# # for i in range(alpha.shape[0]):
# # max_prob_a[i] = fmin(func=f, x0=np.array([0.]), args=(i,), disp=0)
# # return torch.Tensor(max_prob_a).unsqueeze(dim=0) * 2. - 1.
# # return (self.beta(prob_alpha * explore + 1, prob_beta * explore + 1).mean * 2. - 1.). \
# # clamp(-self.threshold, self.threshold) / self.threshold
#
# return ((alpha - 1) / (alpha + beta - 2)) * 2. - 1.
#
# def get_action(self, state):
# pi = self.get_distribution(state)
# return pi.sample() * 2. - 1.
# # return (pi.sample() * 2. - 1.).clamp(-self.threshold, self.threshold) / self.threshold
#
# def get_distribution(self, state):
# tmp = self.net(state)
# explore = (self.softplus(self.net_a_explore(tmp))).clamp_max(self.explore_upbound)
# explore = explore.unsqueeze(dim=2).repeat(1, 1, 2)
# alpha_beta = torch.cat((self.net_a_alpha(tmp).unsqueeze(dim=2),
# self.net_a_beta(tmp).unsqueeze(dim=2)),
# dim=2) # [batch, action_dim, 2: (alpha, beta)]
# alpha_beta = self.softmax(alpha_beta) * explore + 1
# alpha = alpha_beta[:, :, 0]
# beta = alpha_beta[:, :, 1]
#
# # prob_alpha = self.sigmoid(self.net_a_alpha(tmp))
# # prob_beta = 1 - prob_alpha
# # alpha = prob_alpha * explore + 1
# # beta = prob_beta * explore + 1
# return self.beta(alpha, beta)
#
# def get_explore(self, state):
# tmp = self.net(state)
# return (self.softplus(self.net_a_explore(tmp))).clamp_max(self.explore_upbound)
#
# def compute_logprob(self, state, action):
# tmp = self.net(state)
# explore = (self.softplus(self.net_a_explore(tmp))).clamp_max(self.explore_upbound)
# explore = explore.unsqueeze(dim=2).repeat(1, 1, 2)
# alpha_beta = torch.cat((self.net_a_alpha(tmp).unsqueeze(dim=2),
# self.net_a_beta(tmp).unsqueeze(dim=2)),
# dim=2) # [batch, action_dim, 2: (alpha, beta)]
# alpha_beta = self.softmax(alpha_beta) * explore + 1
# alpha = alpha_beta[:, :, 0]
# beta = alpha_beta[:, :, 1]
# pi = self.beta(alpha, beta)
#
# # log_weights = ((explore + 1).log() - pi.log_prob(
# # (pi.concentration0 - 1) / (pi.concentration0 + pi.concentration1 - 2))).detach_()
# # return (log_weights + pi.log_prob((action + 1.) / 2.)).sum(dim=1)
# return pi.log_prob((action + 1.) / 2.).sum(dim=1)
#
# def get_entropy(self, state, action, target_entropy=None):
# tmp = self.net(state)
# explore = (self.softplus(self.net_a_explore(tmp))).clamp_max(self.explore_upbound)
# explore = explore.unsqueeze(dim=2).repeat(1, 1, 2)
# alpha_beta = torch.cat((self.net_a_alpha(tmp).unsqueeze(dim=2),
# self.net_a_beta(tmp).unsqueeze(dim=2)),
# dim=2) # [batch, action_dim, 2: (alpha, beta)]
# alpha_beta = self.softmax(alpha_beta).detach_() * explore + 1
# alpha = alpha_beta[:, :, 0]
# beta = alpha_beta[:, :, 1]
# tmp_pi = self.beta(alpha, beta)
# log_prob = tmp_pi.log_prob((action + 1.) / 2.).sum(dim=1)
# if target_entropy is not None:
# return (log_prob.exp() * log_prob - target_entropy).clamp_min(0).mean()
# else:
# return (log_prob.exp() * log_prob).mean()
#
#
class ActorPPOBeta2(nn.Module):
def __init__(self, mid_dim, state_dim, action_dim, if_use_dn=False):
super().__init__()
self.net = nn.Sequential(nn.Linear(state_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU(),
# nn.Linear(mid_dim, mid_dim), nn.Hardswish(),
)
        self.net_a_alpha = nn.Linear(mid_dim, action_dim)  # alpha parameter of the Beta distribution
        self.net_a_beta = nn.Linear(mid_dim, action_dim)  # beta parameter of the Beta distribution
self.a_explore = nn.Parameter(torch.zeros((1, action_dim)) + 1., requires_grad=True)
# self.a_explore_log = nn.Parameter(torch.zeros((1, action_dim)) + 1., requires_grad=True)
self.softplus = nn.Softplus(threshold=18.)
self.sigmoid = torch.nn.Sigmoid()
self.softmax = torch.nn.Softmax(dim=2)
self.beta = torch.distributions.Beta
def forward(self, state):
tmp = self.net(state)
# explore = self.a_explore_log.clamp(-16,2).exp()
explore = self.softplus(self.a_explore)
explore = explore.unsqueeze(dim=2).repeat(1, 1, 2)
alpha_beta = torch.cat((self.net_a_alpha(tmp).unsqueeze(dim=2),
self.net_a_beta(tmp).unsqueeze(dim=2)), dim=2)
alpha_beta = self.softmax(alpha_beta) * explore + 1
alpha = alpha_beta[:, :, 0]
beta = alpha_beta[:, :, 1]
max_prob = ((alpha - 1) / (alpha + beta - 2))
return max_prob * 2. - 1.
def get_action(self, state):
pi = self.get_distribution(state)
return pi.sample() * 2. - 1.
# return (pi.sample() * 2. - 1.).clamp(-self.threshold, self.threshold) / self.threshold
def get_distribution(self, state):
tmp = self.net(state)
# explore = self.a_explore_log.clamp(-16,2).exp()
explore = self.softplus(self.a_explore)
explore = explore.unsqueeze(dim=2).repeat(1, 1, 2)
alpha_beta = torch.cat((self.net_a_alpha(tmp).unsqueeze(dim=2),
self.net_a_beta(tmp).unsqueeze(dim=2)), dim=2) # [batch, action_dim, 2: (alpha, beta)]
alpha_beta = self.softmax(alpha_beta) * explore + 1
alpha = alpha_beta[:, :, 0]
beta = alpha_beta[:, :, 1]
return self.beta(alpha, beta)
def get_explore(self, state):
# return self.a_explore_log.clamp(-16,2).exp()
return self.softplus(self.a_explore)
def compute_logprob(self, state, action):
tmp = self.net(state)
# explore = self.a_explore_log.clamp(-16,2).exp()
explore = self.softplus(self.a_explore)
explore = explore.unsqueeze(dim=2).repeat(1, 1, 2)
alpha_beta = torch.cat((self.net_a_alpha(tmp).unsqueeze(dim=2),
self.net_a_beta(tmp).unsqueeze(dim=2)),
dim=2) # [batch, action_dim, 2: (alpha, beta)]
alpha_beta = self.softmax(alpha_beta) * explore + 1
alpha = alpha_beta[:, :, 0]
beta = alpha_beta[:, :, 1]
pi = self.beta(alpha, beta)
return pi.log_prob((action + 1.) / 2.).sum(dim=1)
class ActorPPOMG(nn.Module):
def __init__(self, mid_dim, state_dim, action_dim, if_use_dn=False):
super().__init__()
self.state_dim = state_dim
self.action_dim = action_dim
self.net_state = nn.Sequential(nn.Linear(state_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU(), )
self.net_a_loc = nn.Linear(mid_dim, action_dim) # the average of action
self.net_a_cholesky = nn.Linear(mid_dim, (action_dim * (action_dim + 1)) // 2)
self.softplus = nn.Softplus(threshold=18.)
        layer_norm(self.net_a_loc, std=0.1)  # output layer for action; this init is not strictly necessary
# layer_norm(self.net_a_cholesky, std=0.01)
def forward(self, state):
tmp = self.net_state(state)
return self.net_a_loc(tmp).tanh() # action
def get_distribution(self, state):
loc, cholesky = self.get_loc_cholesky(state)
return MultivariateNormal(loc=loc, scale_tril=cholesky)
def get_loc_cholesky(self, state):
t_tmp = self.net_state(state)
a_loc = self.net_a_loc(t_tmp) # NOTICE! it is a_loc without .tanh()
# a_cholesky_vector = self.net_a_cholesky(t_tmp).relu()
a_cholesky_vector = self.softplus(self.net_a_cholesky(t_tmp))
# cholesky_diag_index = torch.arange(self.action_dim, dtype=torch.long) + 1
# cholesky_diag_index = (cholesky_diag_index * (cholesky_diag_index + 1)) // 2 - 1
# a_cholesky_vector[:, cholesky_diag_index] = self.softplus(a_cholesky_vector[:, cholesky_diag_index])
tril_indices = torch.tril_indices(row=self.action_dim, col=self.action_dim, offset=0)
a_cholesky = torch.zeros(size=(a_loc.shape[0], self.action_dim, self.action_dim), dtype=torch.float32,
device=t_tmp.device)
a_cholesky[:, tril_indices[0], tril_indices[1]] = a_cholesky_vector
return a_loc, a_cholesky
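    # The filled lower-triangular matrix parameterizes a full-covariance Gaussian via scale_tril.
    # Softplus makes every filled entry positive; strictly only the diagonal has to be positive for a valid
    # Cholesky factor (the commented-out lines above show that variant).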
def get_action(self, state):
pi = self.get_distribution(state)
return pi.sample().tanh() # re-parameterize
def compute_logprob(self, state, action):
action = action.atanh()
pi = self.get_distribution(state)
return pi.log_prob(action)
class ActorSAC(nn.Module):
def __init__(self, mid_dim, state_dim, action_dim, if_use_dn=False):
super().__init__()
if if_use_dn:
nn_middle = DenseNet(mid_dim // 2)
inp_dim = nn_middle.inp_dim
out_dim = nn_middle.out_dim
else:
nn_middle = nn.Sequential(nn.Linear(mid_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU(), )
inp_dim = mid_dim
out_dim = mid_dim
self.net_state = nn.Sequential(nn.Linear(state_dim, inp_dim), nn.ReLU(),
nn_middle, )
self.net_a_avg = nn.Sequential(nn.Linear(out_dim, mid_dim), nn.Hardswish(),
nn.Linear(mid_dim, action_dim)) # the average of action
self.net_a_std = nn.Sequential(nn.Linear(out_dim, mid_dim), nn.Hardswish(),
nn.Linear(mid_dim, action_dim)) # the log_std of action
self.log_sqrt_2pi = np.log(np.sqrt(2 * np.pi))
self.softplus = torch.nn.Softplus()
        # layer_norm(self.net_a_avg, std=0.01)  # output layer for action; this init is not strictly necessary
def forward(self, state):
tmp = self.net_state(state)
return self.net_a_avg(tmp).tanh() # action
def get_action(self, state):
dist = self.get_distribution(state)
return dist.sample().tanh() # re-parameterize
def get_distribution(self, state):
t_tmp = self.net_state(state)
a_avg = self.net_a_avg(t_tmp) # NOTICE! it is a_avg without .tanh()
a_std = self.net_a_std(t_tmp).clamp(-20, 2).exp() # todo
return Normal(a_avg, a_std)
def get_action_logprob(self, state):
t_tmp = self.net_state(state)
a_avg = self.net_a_avg(t_tmp) # NOTICE! it needs a_avg.tanh()
a_std_log = self.net_a_std(t_tmp).clamp(-20, 2) # todo (-20, 2)
a_std = a_std_log.exp()
"""add noise to action in stochastic policy"""
noise = torch.randn_like(a_avg, requires_grad=True)
a_noise = a_avg + a_std * noise
a_tan = a_noise.tanh() # action.tanh()
        # Only the re-parameterized form above can be used (not torch.normal below), because the sampled tensor needs gradients here.
# a_noise = torch.normal(a_avg, a_std, requires_grad=True)
''' compute logprob according to mean and std of action (stochastic policy) '''
# # self.sqrt_2pi_log = np.log(np.sqrt(2 * np.pi))
# logprob = a_std_log + self.sqrt_2pi_log + noise.pow(2).__mul__(0.5) # noise.pow(2) * 0.5
# different from above (gradient)
logprob = a_std_log + self.log_sqrt_2pi + noise.pow(2).__mul__(0.5)
# same as below:
# from torch.distributions.normal import Normal
# logprob_noise = Normal(a_avg, a_std).logprob(a_noise)
# logprob = logprob_noise + (-a_noise_tanh.pow(2) + 1.000001).log()
# same as below:
# a_delta = (a_avg - a_noise).pow(2) /(2*a_std.pow(2))
# logprob_noise = -a_delta - a_std.log() - np.log(np.sqrt(2 * np.pi))
# logprob = logprob_noise + (-a_noise_tanh.pow(2) + 1.000001).log()
# logprob = logprob + (-a_tan.pow(2) + 1.000001).log() # fix logprob using the derivative of action.tanh()
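        # Identity: 2 * (log 2 - x - softplus(-2x)) == log(1 - tanh(x) ** 2), so the line below applies the
        # tanh change-of-variables correction in a numerically stable way (cf. the commented alternatives above).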
logprob = logprob + 2. * (np.log(2.) - a_noise - self.softplus(-2. * a_noise))
# same as below:
# epsilon = 1e-6
# logprob = logprob_noise - (1 - a_noise_tanh.pow(2) + epsilon).log()
return a_tan, logprob.sum(1, keepdim=True)
# class ActorSACBeta(nn.Module):
# def __init__(self, mid_dim, state_dim, action_dim, if_use_dn=False):
# super().__init__()
# if if_use_dn: # use a DenseNet (DenseNet has both shallow and deep linear layer)
# nn_dense = DenseNet(mid_dim)
# inp_dim = nn_dense.inp_dim
# out_dim = nn_dense.out_dim
#
# self.net_state = nn.Sequential(nn.Linear(state_dim, inp_dim), nn.ReLU(),
# nn_dense, )
# else: # use a simple network. Deeper network does not mean better performance in RL.
# self.net_state = nn.Sequential(nn.Linear(state_dim, mid_dim), nn.ReLU(),
# nn.Linear(mid_dim, mid_dim), nn.Hardswish(),
# nn.Linear(mid_dim, mid_dim), nn.Hardswish(), )
# out_dim = mid_dim
#
# self.net_a_alpha = nn.Linear(out_dim, action_dim) # the average of action
# self.net_a_beta = nn.Linear(out_dim, action_dim) # the log_std of action
#
# self.softplus = nn.Softplus(threshold=18.)
# self.beta = torch.distributions.Beta
# # layer_norm(self.net_a_avg, std=0.01) # output layer for action, it is no necessary.
#
# def forward(self, state):
# tmp = self.net_state(state)
# alpha = self.softplus(self.net_a_alpha(tmp)) + 1
# beta = self.softplus(self.net_a_beta(tmp)) + 1
# if alpha.any() > 1000 or beta.any() > 1000:
# print()
# return self.beta(alpha, beta).mean * 2. - 1.
#
# def get_action(self, state):
# pi = self.get_distribution(state)
# return pi.sample() * 2. - 1.
#
# def get_distribution(self, state):
# tmp = self.net_state(state)
# alpha = self.softplus(self.net_a_alpha(tmp)) + 1
# beta = self.softplus(self.net_a_beta(tmp)) + 1
# return self.beta(alpha, beta)
#
# def get_action_logprob(self, state):
# pi = self.get_distribution(state)
# origin_action = pi.rsample()
# log_prob = pi.log_prob(origin_action) + (-origin_action.pow(2) + 1.000001).log()
# return origin_action * 2. - 1., log_prob.sum(1, keepdim=True)
# class ActorSACBeta2(nn.Module):
# def __init__(self, mid_dim, state_dim, action_dim, if_use_dn=False):
# super().__init__()
#
# self.net = nn.Sequential(nn.Linear(state_dim, mid_dim), nn.ReLU(),
# # nn.Linear(mid_dim, mid_dim), nn.ReLU(),
# nn.Linear(mid_dim, mid_dim), nn.Hardswish(),
# nn.Linear(mid_dim, mid_dim), nn.Hardswish(),
# )
#
# self.net_a_alpha = nn.Linear(mid_dim, action_dim) # the average of action
# self.net_a_beta = nn.Linear(mid_dim, action_dim) # the log_std of action
# self.net_a_explore = nn.Linear(mid_dim, action_dim)
#
# self.softplus = nn.Softplus(threshold=18.)
# self.sigmoid = torch.nn.Sigmoid()
# self.softmax = torch.nn.Softmax(dim=2)
# self.beta = torch.distributions.Beta
# self.threshold = 0.95
# self.explore_upbound = 30.
#
# def forward(self, state):
# tmp = self.net(state)
# explore = self.softplus(self.net_a_explore(tmp)).clamp_max(self.explore_upbound) + 1
# # explore = explore.unsqueeze(dim=2).repeat(1, 1, 2)[0, 1, :]
# # alpha_beta = torch.cat((self.net_a_alpha(tmp).unsqueeze(dim=2),
# # self.net_a_beta(tmp).unsqueeze(dim=2)), dim=2)
# # alpha_beta = self.softmax(alpha_beta) * explore + 1
# # return self.beta(alpha_beta[:, :, 0], alpha_beta[:, :, 1]).mean * 2. - 1.
# prob_alpha = self.sigmoid(self.net_a_alpha(tmp))
# prob_beta = 1 - prob_alpha
# return (self.beta(prob_alpha * explore + 1, prob_beta * explore + 1).mean * 2. - 1.). \
# clamp(-self.threshold, self.threshold) / self.threshold
#
# def get_action(self, state):
# pi = self.get_distribution(state)
# return (pi.sample() * 2. - 1.).clamp(-self.threshold, self.threshold) / self.threshold
#
# def get_distribution(self, state):
# tmp = self.net(state)
# explore = self.softplus(self.net_a_explore(tmp)).clamp_max(self.explore_upbound) + 1
# # explore = explore.unsqueeze(dim=2).repeat(1, 1, 2)[0, 1, :]
# # alpha_beta = torch.cat((self.net_a_alpha(tmp).unsqueeze(dim=2),
# # self.net_a_beta(tmp).unsqueeze(dim=2)),
# # dim=2) # [batch, action_dim, 2: (alpha, beta)]
# # alpha_beta = self.softmax(alpha_beta) * explore + 1
# # return self.beta(alpha_beta[:, :, 0], alpha_beta[:, :, 1])
# prob_alpha = self.sigmoid(self.net_a_alpha(tmp))
# prob_beta = 1 - prob_alpha
# return self.beta(prob_alpha * explore + 1, prob_beta * explore + 1)
#
# def get_explore(self, state):
# tmp = self.net(state)
# return self.softplus(self.net_a_explore(tmp)).clamp_max(self.explore_upbound) + 2
#
# def compute_logprob(self, state, action):
# pi = self.get_distribution(state)
# return pi.log_prob((action * self.threshold + 1.) / 2.).sum(dim=1)
#
# def get_action_logprob(self, state):
# pi = self.get_distribution(state)
# origin_action = pi.rsample()
# log_prob = pi.log_prob(origin_action * self.threshold) + (-origin_action.pow(2) + 1.000001).log()
# return origin_action * 2. - 1., log_prob.sum(1, keepdim=True)
# class ActorHybridSAC(nn.Module):
# def __init__(self, mid_dim, state_dim, action_dim, if_use_dn=False):
# super().__init__()
# self.action_dim = action_dim
# self.net_state = nn.Sequential(nn.Linear(state_dim, mid_dim), nn.ReLU(),
# nn.Linear(mid_dim, mid_dim), nn.Hardswish(),
# nn.Linear(mid_dim, mid_dim), nn.Hardswish())
# out_dim = mid_dim
#
# self.net_ca_avg = nn.Linear(out_dim, action_dim[0]) # the average of action
# self.net_ca_std = nn.Linear(out_dim, action_dim[0]) # the log_std of action
#
# self.net_da = nn.Linear(out_dim, action_dim[1])
# self.soft_max = nn.Softmax(dim=-1)
# self.Categorical = torch.distributions.Categorical
#
# self.sqrt_2pi_log = np.log(np.sqrt(2 * np.pi))
# layer_norm(self.net_ca_avg, std=0.01) # output layer for action, it is no necessary.
#
# def forward(self, state):
# tmp = self.net_state(state)
# da_prob = self.soft_max(self.net_da(tmp))
# da = da_prob.argmax(dim=1).unsqueeze(dim=1)
# return torch.cat((self.net_ca_avg(tmp).tanh(), da), 1) # action
#
# def get_action(self, state):
# t_tmp = self.net_state(state)
# a_avg = self.net_ca_avg(t_tmp) # NOTICE! it is a_avg without .tanh()
# a_std = self.net_ca_std(t_tmp).clamp(-20, 2).exp() # todo
# da_prob = self.soft_max(self.net_da(t_tmp))
# da_sample = torch.multinomial(da_prob, num_samples=1, replacement=True).squeeze(dim=1)
# return torch.cat((torch.normal(a_avg, a_std).tanh()[0], da_sample), 0) # re-parameterize
#
# def get_action_logprob(self, state):
# t_tmp = self.net_state(state)
# a_avg = self.net_ca_avg(t_tmp) # NOTICE! it needs a_avg.tanh()
# a_std_log = self.net_ca_std(t_tmp).clamp(-20, 2) # todo (-20, 2)
# a_std = a_std_log.exp()
# da_prob = self.soft_max(self.net_da(t_tmp))
#
# """add noise to action in stochastic policy"""
# noise = torch.randn_like(a_avg, requires_grad=True)
# action = a_avg + a_std * noise
# a_tan = action.tanh() # action.tanh()
# '''compute logprob according to mean and std of action (stochastic policy)'''
# delta = ((a_avg - action) / a_std).pow(2).__mul__(0.5)
# logprob = a_std_log + self.sqrt_2pi_log + delta
# logprob = logprob + (-a_tan.pow(2) + 1.000001).log() # fix logprob using the derivative of action.tanh()
#
# # da = torch.multinomial(da_prob, num_samples=1, replacement=True).squeeze(dim=1)
# da = da_prob.argmax(dim=1)
# da_onehot = torch.nn.functional.one_hot(da, self.action_dim[1])
# return torch.cat((torch.normal(a_avg, a_std).tanh(), da_onehot), 1), \
# logprob.sum(1, keepdim=True) + da_prob.max(dim=1)[0].unsqueeze(dim=1).log()
# # return torch.cat((torch.normal(a_avg, a_std).tanh(), da_prob), 1), \
# # logprob.sum(1, keepdim=True) * da_prob.max(dim=1)[0].unsqueeze(dim=1)
# class ActorSACMG(nn.Module):
# def __init__(self, mid_dim, state_dim, action_dim, if_use_dn=False):
# super().__init__()
# self.state_dim = state_dim
# self.action_dim = action_dim
# self.net_state = nn.Sequential(nn.Linear(state_dim, mid_dim), nn.ReLU(),
# nn.Linear(mid_dim, mid_dim), nn.Hardswish(),
# nn.Linear(mid_dim, mid_dim), nn.Hardswish())
#
# self.net_a_loc = nn.Linear(mid_dim, action_dim) # the average of action
# self.net_a_cholesky = nn.Linear(mid_dim, (action_dim * (action_dim + 1)) // 2)
# self.softplus = nn.Softplus(threshold=18.)
# layer_norm(self.net_a_loc, std=0.01) # output layer for action, it is no necessary.
# # layer_norm(self.net_a_cholesky, std=0.01)
#
# def forward(self, state):
# return self.net_a_loc(self.net_state(state)).tanh() # action
#
# def get_distribution(self, state):
# loc, cholesky = self.get_loc_cholesky(state)
# return MultivariateNormal(loc=loc, scale_tril=cholesky)
#
# def get_loc_cholesky(self, state):
# t_tmp = self.net_state(state)
# a_loc = self.net_a_loc(t_tmp) # NOTICE! it is a_loc without .tanh()
# # a_cholesky_vector = self.softplus(self.net_a_cholesky(t_tmp).relu())
# a_cholesky_vector = self.softplus(self.net_a_cholesky(t_tmp))
# # a_cholesky_vector = self.net_a_cholesky(t_tmp).clamp(-20, 2).exp()
# # cholesky_diag_index = torch.arange(self.action_dim, dtype=torch.long) + 1
# # cholesky_diag_index = (cholesky_diag_index * (cholesky_diag_index + 1)) // 2 - 1
# # a_cholesky_vector[:, cholesky_diag_index] = self.softplus(a_cholesky_vector[:, cholesky_diag_index])
# tril_indices = torch.tril_indices(row=self.action_dim, col=self.action_dim, offset=0)
# a_cholesky = torch.zeros(size=(a_loc.shape[0], self.action_dim, self.action_dim), dtype=torch.float32,
# device=t_tmp.device)
# a_cholesky[:, tril_indices[0], tril_indices[1]] = a_cholesky_vector
# return a_loc, a_cholesky
#
# def get_action(self, state):
# pi = self.get_distribution(state)
# return pi.sample().tanh() # re-parameterize
#
# def get_action_logprob(self, state):
# a_loc, a_cholesky = self.get_loc_cholesky(state)
# pi = MultivariateNormal(loc=a_loc, scale_tril=a_cholesky)
# action = pi.rsample()
# a_tan = action.tanh()
# log_prob = pi.log_prob(action).unsqueeze(im=1) + (-a_tan.pow(2) + 1.000001).log()
# return a_tan, log_prob.mean(dim=1, keepdim=True)
# class ActorMPO(nn.Module):
# def __init__(self, mid_dim, state_dim, action_dim, if_use_dn=False):
# super().__init__()
# self.action_dim = action_dim
# self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
# if if_use_dn: # use a DenseNet (DenseNet has both shallow and deep linear layer)
# nn_dense_net = DenseNet(mid_dim)
# self.net_state = nn.Sequential(nn.Linear(state_dim, mid_dim), nn.ReLU(),
# nn_dense_net, )
# lay_dim = nn_dense_net.out_dim
# else: # use a simple network. Deeper network does not mean better performance in RL.
# lay_dim = mid_dim
# self.net_state = nn.Sequential(nn.Linear(state_dim, mid_dim), nn.ReLU(),
# nn.Linear(mid_dim, lay_dim), nn.ReLU(),
# nn.Linear(mid_dim, lay_dim), nn.ReLU())
# # nn.Linear(mid_dim, lay_dim), nn.Hardswish())
# self.net_a_avg = nn.Linear(lay_dim, action_dim) # the average of action
# self.cholesky_layer = nn.Linear(lay_dim, (action_dim * (action_dim + 1)) // 2)
#
# layer_norm(self.net_a_avg, std=0.01) # output layer for action, it is no necessary.
# layer_norm(self.cholesky_layer, std=0.01)
#
# def forward(self, state):
# return self.net_a_avg(self.net_state(state)).tanh() # action
#
# def get_distribution(self, state):
# a_avg, cholesky = self.get_loc_cholesky(state)
# return MultivariateNormal(loc=a_avg, scale_tril=cholesky)
#
# def get_loc_cholesky(self, state):
# t_tmp = self.net_state(state)
# a_avg = self.net_a_avg(t_tmp) # NOTICE! it is a_avg without .tanh()
# cholesky_vector = self.cholesky_layer(t_tmp)
# cholesky_diag_index = torch.arange(self.action_dim, dtype=torch.long) + 1
# cholesky_diag_index = (cholesky_diag_index * (cholesky_diag_index + 1)) // 2 - 1
# cholesky_vector[:, cholesky_diag_index] = F.softplus(cholesky_vector[:, cholesky_diag_index])
# tril_indices = torch.tril_indices(row=self.action_dim, col=self.action_dim, offset=0)
# cholesky = torch.zeros(size=(a_avg.shape[0], self.action_dim, self.action_dim), dtype=torch.float32,
# device=t_tmp.device)
# cholesky[:, tril_indices[0], tril_indices[1]] = cholesky_vector
# return a_avg, cholesky
#
# def get_action(self, state):
# pi_action = self.get_distribution(state)
# return pi_action.sample().tanh() # re-parameterize
#
# def get_actions(self, state, sampled_actions_num):
# pi_action = self.get_distribution(state)
# return pi_action.sample((sampled_actions_num,)).tanh()
'''Value Network (Critic)'''
class Critic(nn.Module):
def __init__(self, mid_dim, state_dim, action_dim):
super().__init__()
self.net = nn.Sequential(nn.Linear(state_dim + action_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, 1))
def forward(self, state, action):
return self.net(torch.cat((state, action), dim=1)) # Q value
class CriticAdv(nn.Module):
def __init__(self, state_dim, mid_dim, if_use_dn=False):
super().__init__()
if isinstance(state_dim, int):
if if_use_dn:
nn_dense = DenseNet(mid_dim)
inp_dim = nn_dense.inp_dim
out_dim = nn_dense.out_dim
self.net = nn.Sequential(nn.Linear(state_dim, inp_dim), nn.ReLU(),
nn_dense,
nn.Linear(out_dim, 1), )
else:
self.net = nn.Sequential(nn.Linear(state_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU(),
# nn.Linear(mid_dim, mid_dim), nn.ReLU(), # nn.Hardswish(),
nn.Linear(mid_dim, 1), )
else:
def set_dim(i):
return int(12 * 1.5 ** i)
self.net = nn.Sequential(NnReshape(*state_dim), # -> [batch_size, 4, 96, 96]
nn.Conv2d(state_dim[0], set_dim(0), 4, 2, bias=True), nn.LeakyReLU(),
nn.Conv2d(set_dim(0), set_dim(1), 3, 2, bias=False), nn.ReLU(),
nn.Conv2d(set_dim(1), set_dim(2), 3, 2, bias=False), nn.ReLU(),
nn.Conv2d(set_dim(2), set_dim(3), 3, 2, bias=True), nn.ReLU(),
nn.Conv2d(set_dim(3), set_dim(4), 3, 1, bias=True), nn.ReLU(),
nn.Conv2d(set_dim(4), set_dim(5), 3, 1, bias=True), nn.ReLU(),
NnReshape(-1),
nn.Linear(set_dim(5), mid_dim), nn.ReLU(),
nn.Linear(mid_dim, 1))
layer_norm(self.net[-1], std=0.5) # output layer for Q value
def forward(self, state):
return self.net(state) # Q value
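# A minimal construction sketch (hypothetical dimensions): CriticAdv accepts either a flat
# state vector (state_dim given as an int) or an image observation (state_dim given as a
# shape tuple), in which case the convolutional branch above is used.
# critic_vec = CriticAdv(state_dim=17, mid_dim=256)           # V(s) for vector states
# critic_img = CriticAdv(state_dim=(4, 96, 96), mid_dim=256)  # V(s) for stacked-frame images
# value = critic_vec(torch.rand(32, 17))                      # -> shape (32, 1)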
class CriticAdv_Multi(nn.Module):
def __init__(self, state_dim, mid_dim, reward_dim, if_shared=False):
super().__init__()
self.if_shared = if_shared
if self.if_shared:
self.net = nn.Sequential(nn.Linear(state_dim, mid_dim), nn.ReLU(),
# nn.Linear(mid_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU(), )
self.tails = nn.ModuleList([nn.Linear(mid_dim, 1) for _ in range(reward_dim)])
[layer_norm(self.tails[i], std=0.5) for i in range(reward_dim)] # output layer for Q value
else:
self.tails = nn.ModuleList([nn.Sequential(nn.Linear(state_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, 1)) for _ in range(reward_dim)])
[layer_norm(self.tails[i][-1], std=0.5) for i in range(reward_dim)]
def forward(self, state):
if self.if_shared:
tmp = self.net(state)
return torch.cat([self.tails[i](tmp) for i in range(len(self.tails))], dim=1) # Multi Q
else:
return torch.cat([self.tails[i](state) for i in range(len(self.tails))], dim=1) # Multi Q value
class CriticTwin(nn.Module):
def __init__(self, mid_dim, state_dim, action_dim, if_use_dn=False):
super().__init__()
if if_use_dn: # use DenseNet (DenseNet has both shallow and deep linear layers)
nn_middle = DenseNet(mid_dim)
inp_dim = nn_middle.inp_dim
out_dim = nn_middle.out_dim
else: # use a simple network. A deeper network does not mean better performance in RL.
nn_middle = nn.Sequential(nn.Linear(mid_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU())
inp_dim = mid_dim
out_dim = mid_dim
self.net_sa = nn.Sequential(nn.Linear(state_dim + action_dim, inp_dim), nn.ReLU(),
nn_middle, ) # concat(state, action)
self.net_q1 = nn.Sequential(nn.Linear(out_dim, mid_dim), nn.Hardswish(),
nn.Linear(mid_dim, 1)) # q1 value
self.net_q2 = nn.Sequential(nn.Linear(out_dim, mid_dim), nn.Hardswish(),
nn.Linear(mid_dim, 1)) # q2 value
def forward(self, state, action):
tmp = self.net_sa(torch.cat((state, action), dim=1))
return self.net_q1(tmp) # one Q value
def get_q1_q2(self, state, action):
tmp = self.net_sa(torch.cat((state, action), dim=1))
return self.net_q1(tmp), self.net_q2(tmp) # two Q values
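# A minimal sketch (hypothetical tensors) of how the twin heads are typically consumed:
# the clipped double-Q trick takes the element-wise minimum of the two estimates when
# building the TD target, which counteracts Q-value overestimation.
# critic = CriticTwin(mid_dim=256, state_dim=17, action_dim=6)
# q1, q2 = critic.get_q1_q2(state, action)        # two (batch, 1) tensors
# q_target = reward + gamma * torch.min(q1, q2)   # illustrative target only; no masking/detach shown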
class HybridCriticTwin(nn.Module):
def __init__(self, mid_dim, state_dim, action_dim, if_use_dn=False):
super().__init__()
if if_use_dn: # use DenseNet (DenseNet has both shallow and deep linear layers)
nn_dense = DenseNet(mid_dim)
inp_dim = nn_dense.inp_dim
out_dim = nn_dense.out_dim
self.net_sa = nn.Sequential(nn.Linear(state_dim + action_dim[0] + action_dim[1], inp_dim), nn.ReLU(),
nn_dense, ) # state-action value function
else: # use a simple network. A deeper network does not mean better performance in RL.
self.net_sa = nn.Sequential(nn.Linear(state_dim + action_dim[0] + action_dim[1], mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU())
out_dim = mid_dim
self.net_q1 = nn.Linear(out_dim, 1)
self.net_q2 = nn.Linear(out_dim, 1)
layer_norm(self.net_q1, std=0.1)
layer_norm(self.net_q2, std=0.1)
def forward(self, state, action):
tmp = self.net_sa(torch.cat((state, action), dim=1))
return self.net_q1(tmp) # one Q value
def get_q1_q2(self, state, action):
tmp = self.net_sa(torch.cat((state, action), dim=1))
return self.net_q1(tmp), self.net_q2(tmp) # two Q values
"""utils"""
class NnReshape(nn.Module):
def __init__(self, *args):
super().__init__()
self.args = args
def forward(self, x):
return x.view((x.size(0),) + self.args)
class DenseNet(nn.Module): # TODO: expose the number of dense layers as a hyper-parameter
def __init__(self, mid_dim):
super().__init__()
self.dense1 = nn.Sequential(nn.Linear(mid_dim // 2, mid_dim // 2), nn.Hardswish())
self.dense2 = nn.Sequential(nn.Linear(mid_dim * 1, mid_dim * 1), nn.Hardswish())
self.inp_dim = mid_dim // 2
self.out_dim = mid_dim * 2
def forward(self, x1): # x1.shape == (-1, mid_dim // 2)
x2 = torch.cat((x1, self.dense1(x1)), dim=1)
x3 = torch.cat((x2, self.dense2(x2)), dim=1)
return x3 # x3.shape == (-1, mid_dim * 2)
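# Shape flow of DenseNet, a minimal sketch assuming mid_dim=256:
# x1: (batch, 128) == mid_dim // 2
# x2: (batch, 256) == cat(x1, dense1(x1))
# x3: (batch, 512) == cat(x2, dense2(x2)) -> out_dim == mid_dim * 2
# net = DenseNet(256); out = net(torch.rand(4, 128))  # out.shape == (4, 512)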
class ConcatNet(nn.Module): # concatenate
def __init__(self, mid_dim):
super().__init__()
self.dense1 = nn.Sequential(nn.Linear(mid_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU(), )
self.dense2 = nn.Sequential(nn.Linear(mid_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU(), )
self.dense3 = nn.Sequential(nn.Linear(mid_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU(), )
self.dense4 = nn.Sequential(nn.Linear(mid_dim, mid_dim), nn.ReLU(),
nn.Linear(mid_dim, mid_dim), nn.ReLU(), )
self.out_dim = mid_dim * 4
def forward(self, x0):
x1 = self.dense1(x0)
x2 = self.dense2(x0)
x3 = self.dense3(x0)
x4 = self.dense4(x0)
return torch.cat((x1, x2, x3, x4), dim=1)
def layer_norm(layer, std=1.0, bias_const=1e-6):
torch.nn.init.orthogonal_(layer.weight, std)
torch.nn.init.constant_(layer.bias, bias_const)
def lstm_layer_norm(layer, hidden_size):
# std = 0.1
# torch.nn.init.orthogonal(layer.weight_ih, std)
# torch.nn.init.orthogonal(layer.weight_hh, std)
# from torch.nn import Parameter
# layer.bias_ih = Parameter(torch.zeros_like(layer.bias_ih))
# layer.bias_hh = Parameter(torch.zeros_like(layer.bias_hh))
k = np.sqrt(1 / hidden_size)
torch.nn.init.uniform_(layer.weight_ih, -k, k)
torch.nn.init.uniform_(layer.weight_hh, -k, k)
torch.nn.init.uniform_(layer.bias_ih, -k, k)
torch.nn.init.uniform_(layer.bias_hh, -k, k)
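# A minimal usage sketch (hypothetical sizes): the uniform bound k = 1/sqrt(hidden_size)
# matches PyTorch's default recurrent-layer initialization. nn.LSTMCell exposes the
# weight_ih / weight_hh / bias_ih / bias_hh attributes this helper expects.
# cell = nn.LSTMCell(input_size=64, hidden_size=128)
# lstm_layer_norm(cell, hidden_size=128)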
| 48.612748
| 119
| 0.600324
| 11,432
| 80,843
| 3.981368
| 0.032015
| 0.040075
| 0.031902
| 0.040602
| 0.903416
| 0.880062
| 0.854334
| 0.841898
| 0.82107
| 0.802373
| 0
| 0.017494
| 0.268186
| 80,843
| 1,662
| 120
| 48.641998
| 0.751834
| 0.331321
| 0
| 0.724653
| 0
| 0
| 0.000151
| 0
| 0
| 0
| 0
| 0.000602
| 0
| 1
| 0.139808
| false
| 0
| 0.007471
| 0.01174
| 0.284952
| 0.003202
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fb350e85aaccb1fefa233cdfe784a3576507d4ea
| 45,062
|
py
|
Python
|
sdk/python/pulumi_wavefront/alert.py
|
pulumi/pulumi-wavefront
|
1d199d386ee241fa2ef94553e6cae1359ec9ccf6
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2022-02-20T09:48:33.000Z
|
2022-02-20T09:48:33.000Z
|
sdk/python/pulumi_wavefront/alert.py
|
pulumi/pulumi-wavefront
|
1d199d386ee241fa2ef94553e6cae1359ec9ccf6
|
[
"ECL-2.0",
"Apache-2.0"
] | 40
|
2020-08-12T08:37:24.000Z
|
2022-03-31T15:51:17.000Z
|
sdk/python/pulumi_wavefront/alert.py
|
pulumi/pulumi-wavefront
|
1d199d386ee241fa2ef94553e6cae1359ec9ccf6
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['AlertArgs', 'Alert']
@pulumi.input_type
class AlertArgs:
def __init__(__self__, *,
minutes: pulumi.Input[int],
tags: pulumi.Input[Sequence[pulumi.Input[str]]],
additional_information: Optional[pulumi.Input[str]] = None,
alert_type: Optional[pulumi.Input[str]] = None,
can_modifies: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
can_views: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
condition: Optional[pulumi.Input[str]] = None,
conditions: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
display_expression: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
notification_resend_frequency_minutes: Optional[pulumi.Input[int]] = None,
resolve_after_minutes: Optional[pulumi.Input[int]] = None,
severity: Optional[pulumi.Input[str]] = None,
target: Optional[pulumi.Input[str]] = None,
threshold_targets: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
The set of arguments for constructing an Alert resource.
:param pulumi.Input[int] minutes: The number of consecutive minutes that a series matching the condition query must
evaluate to "true" (non-zero value) before the alert fires.
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: A set of tags to assign to this resource.
:param pulumi.Input[str] additional_information: User-supplied additional explanatory information for this alert.
Useful for linking runbooks, migrations, etc.
:param pulumi.Input[str] alert_type: The type of alert in Wavefront. Either `CLASSIC` (default)
or `THRESHOLD`
:param pulumi.Input[Sequence[pulumi.Input[str]]] can_modifies: A list of users or groups that can modify this resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] can_views: A list of users or groups that can view this resource.
:param pulumi.Input[str] condition: A Wavefront query that is evaluated at regular intervals (default 1m).
The alert fires and notifications are triggered when data series matching this query evaluates
to a non-zero value for a set number of consecutive minutes.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] conditions: a string->string map of `severity` to `condition`
for which this alert will trigger.
:param pulumi.Input[str] display_expression: A second query whose results are displayed in the alert user
interface instead of the condition query. This field is often used to display a version
of the condition query with Boolean operators removed so that numerical values are plotted.
:param pulumi.Input[str] name: The name of the alert as it is displayed in Wavefront.
:param pulumi.Input[int] notification_resend_frequency_minutes: How often to re-trigger a continually failing alert.
If absent or <= 0, no re-triggering occurs.
:param pulumi.Input[int] resolve_after_minutes: The number of consecutive minutes that a firing series matching the condition
query must evaluate to "false" (zero value) before the alert resolves. When unset, this defaults to
the same value as `minutes`.
:param pulumi.Input[str] severity: - Severity of the alert, valid values are `INFO`, `SMOKE`, `WARN`, `SEVERE`.
:param pulumi.Input[str] target: A comma-separated list of the email address or integration endpoint
(such as PagerDuty or web hook) to notify when the alert status changes.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] threshold_targets: Targets for severity
"""
pulumi.set(__self__, "minutes", minutes)
pulumi.set(__self__, "tags", tags)
if additional_information is not None:
pulumi.set(__self__, "additional_information", additional_information)
if alert_type is not None:
pulumi.set(__self__, "alert_type", alert_type)
if can_modifies is not None:
pulumi.set(__self__, "can_modifies", can_modifies)
if can_views is not None:
pulumi.set(__self__, "can_views", can_views)
if condition is not None:
pulumi.set(__self__, "condition", condition)
if conditions is not None:
pulumi.set(__self__, "conditions", conditions)
if display_expression is not None:
pulumi.set(__self__, "display_expression", display_expression)
if name is not None:
pulumi.set(__self__, "name", name)
if notification_resend_frequency_minutes is not None:
pulumi.set(__self__, "notification_resend_frequency_minutes", notification_resend_frequency_minutes)
if resolve_after_minutes is not None:
pulumi.set(__self__, "resolve_after_minutes", resolve_after_minutes)
if severity is not None:
pulumi.set(__self__, "severity", severity)
if target is not None:
pulumi.set(__self__, "target", target)
if threshold_targets is not None:
pulumi.set(__self__, "threshold_targets", threshold_targets)
@property
@pulumi.getter
def minutes(self) -> pulumi.Input[int]:
"""
The number of consecutive minutes that a series matching the condition query must
evaluate to "true" (non-zero value) before the alert fires.
"""
return pulumi.get(self, "minutes")
@minutes.setter
def minutes(self, value: pulumi.Input[int]):
pulumi.set(self, "minutes", value)
@property
@pulumi.getter
def tags(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
"""
A set of tags to assign to this resource.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="additionalInformation")
def additional_information(self) -> Optional[pulumi.Input[str]]:
"""
User-supplied additional explanatory information for this alert.
Useful for linking runbooks, migrations, etc.
"""
return pulumi.get(self, "additional_information")
@additional_information.setter
def additional_information(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "additional_information", value)
@property
@pulumi.getter(name="alertType")
def alert_type(self) -> Optional[pulumi.Input[str]]:
"""
The type of alert in Wavefront. Either `CLASSIC` (default)
or `THRESHOLD`
"""
return pulumi.get(self, "alert_type")
@alert_type.setter
def alert_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "alert_type", value)
@property
@pulumi.getter(name="canModifies")
def can_modifies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of users or groups that can modify this resource.
"""
return pulumi.get(self, "can_modifies")
@can_modifies.setter
def can_modifies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "can_modifies", value)
@property
@pulumi.getter(name="canViews")
def can_views(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of users or groups that can view this resource.
"""
return pulumi.get(self, "can_views")
@can_views.setter
def can_views(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "can_views", value)
@property
@pulumi.getter
def condition(self) -> Optional[pulumi.Input[str]]:
"""
A Wavefront query that is evaluated at regular intervals (default 1m).
The alert fires and notifications are triggered when data series matching this query evaluates
to a non-zero value for a set number of consecutive minutes.
"""
return pulumi.get(self, "condition")
@condition.setter
def condition(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "condition", value)
@property
@pulumi.getter
def conditions(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
a string->string map of `severity` to `condition`
for which this alert will trigger.
"""
return pulumi.get(self, "conditions")
@conditions.setter
def conditions(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "conditions", value)
@property
@pulumi.getter(name="displayExpression")
def display_expression(self) -> Optional[pulumi.Input[str]]:
"""
A second query whose results are displayed in the alert user
interface instead of the condition query. This field is often used to display a version
of the condition query with Boolean operators removed so that numerical values are plotted.
"""
return pulumi.get(self, "display_expression")
@display_expression.setter
def display_expression(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "display_expression", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the alert as it is displayed in Wavefront.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="notificationResendFrequencyMinutes")
def notification_resend_frequency_minutes(self) -> Optional[pulumi.Input[int]]:
"""
How often to re-trigger a continually failing alert.
If absent or <= 0, no re-triggering occurs.
"""
return pulumi.get(self, "notification_resend_frequency_minutes")
@notification_resend_frequency_minutes.setter
def notification_resend_frequency_minutes(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "notification_resend_frequency_minutes", value)
@property
@pulumi.getter(name="resolveAfterMinutes")
def resolve_after_minutes(self) -> Optional[pulumi.Input[int]]:
"""
The number of consecutive minutes that a firing series matching the condition
query must evaluate to "false" (zero value) before the alert resolves. When unset, this defaults to
the same value as `minutes`.
"""
return pulumi.get(self, "resolve_after_minutes")
@resolve_after_minutes.setter
def resolve_after_minutes(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "resolve_after_minutes", value)
@property
@pulumi.getter
def severity(self) -> Optional[pulumi.Input[str]]:
"""
- Severity of the alert, valid values are `INFO`, `SMOKE`, `WARN`, `SEVERE`.
"""
return pulumi.get(self, "severity")
@severity.setter
def severity(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "severity", value)
@property
@pulumi.getter
def target(self) -> Optional[pulumi.Input[str]]:
"""
A comma-separated list of the email address or integration endpoint
(such as PagerDuty or web hook) to notify when the alert status changes.
"""
return pulumi.get(self, "target")
@target.setter
def target(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "target", value)
@property
@pulumi.getter(name="thresholdTargets")
def threshold_targets(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Targets for severity
"""
return pulumi.get(self, "threshold_targets")
@threshold_targets.setter
def threshold_targets(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "threshold_targets", value)
@pulumi.input_type
class _AlertState:
def __init__(__self__, *,
additional_information: Optional[pulumi.Input[str]] = None,
alert_type: Optional[pulumi.Input[str]] = None,
can_modifies: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
can_views: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
condition: Optional[pulumi.Input[str]] = None,
conditions: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
display_expression: Optional[pulumi.Input[str]] = None,
minutes: Optional[pulumi.Input[int]] = None,
name: Optional[pulumi.Input[str]] = None,
notification_resend_frequency_minutes: Optional[pulumi.Input[int]] = None,
resolve_after_minutes: Optional[pulumi.Input[int]] = None,
severity: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
target: Optional[pulumi.Input[str]] = None,
threshold_targets: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
Input properties used for looking up and filtering Alert resources.
:param pulumi.Input[str] additional_information: User-supplied additional explanatory information for this alert.
Useful for linking runbooks, migrations, etc.
:param pulumi.Input[str] alert_type: The type of alert in Wavefront. Either `CLASSIC` (default)
or `THRESHOLD`
:param pulumi.Input[Sequence[pulumi.Input[str]]] can_modifies: A list of users or groups that can modify this resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] can_views: A list of users or groups that can view this resource.
:param pulumi.Input[str] condition: A Wavefront query that is evaluated at regular intervals (default 1m).
The alert fires and notifications are triggered when data series matching this query evaluates
to a non-zero value for a set number of consecutive minutes.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] conditions: a string->string map of `severity` to `condition`
for which this alert will trigger.
:param pulumi.Input[str] display_expression: A second query whose results are displayed in the alert user
interface instead of the condition query. This field is often used to display a version
of the condition query with Boolean operators removed so that numerical values are plotted.
:param pulumi.Input[int] minutes: The number of consecutive minutes that a series matching the condition query must
evaluate to "true" (non-zero value) before the alert fires.
:param pulumi.Input[str] name: The name of the alert as it is displayed in Wavefront.
:param pulumi.Input[int] notification_resend_frequency_minutes: How often to re-trigger a continually failing alert.
If absent or <= 0, no re-triggering occurs.
:param pulumi.Input[int] resolve_after_minutes: The number of consecutive minutes that a firing series matching the condition
query must evaluate to "false" (zero value) before the alert resolves. When unset, this defaults to
the same value as `minutes`.
:param pulumi.Input[str] severity: - Severity of the alert, valid values are `INFO`, `SMOKE`, `WARN`, `SEVERE`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: A set of tags to assign to this resource.
:param pulumi.Input[str] target: A comma-separated list of the email address or integration endpoint
(such as PagerDuty or web hook) to notify when the alert status changes.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] threshold_targets: Targets for severity
"""
if additional_information is not None:
pulumi.set(__self__, "additional_information", additional_information)
if alert_type is not None:
pulumi.set(__self__, "alert_type", alert_type)
if can_modifies is not None:
pulumi.set(__self__, "can_modifies", can_modifies)
if can_views is not None:
pulumi.set(__self__, "can_views", can_views)
if condition is not None:
pulumi.set(__self__, "condition", condition)
if conditions is not None:
pulumi.set(__self__, "conditions", conditions)
if display_expression is not None:
pulumi.set(__self__, "display_expression", display_expression)
if minutes is not None:
pulumi.set(__self__, "minutes", minutes)
if name is not None:
pulumi.set(__self__, "name", name)
if notification_resend_frequency_minutes is not None:
pulumi.set(__self__, "notification_resend_frequency_minutes", notification_resend_frequency_minutes)
if resolve_after_minutes is not None:
pulumi.set(__self__, "resolve_after_minutes", resolve_after_minutes)
if severity is not None:
pulumi.set(__self__, "severity", severity)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if target is not None:
pulumi.set(__self__, "target", target)
if threshold_targets is not None:
pulumi.set(__self__, "threshold_targets", threshold_targets)
@property
@pulumi.getter(name="additionalInformation")
def additional_information(self) -> Optional[pulumi.Input[str]]:
"""
User-supplied additional explanatory information for this alert.
Useful for linking runbooks, migrations, etc.
"""
return pulumi.get(self, "additional_information")
@additional_information.setter
def additional_information(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "additional_information", value)
@property
@pulumi.getter(name="alertType")
def alert_type(self) -> Optional[pulumi.Input[str]]:
"""
The type of alert in Wavefront. Either `CLASSIC` (default)
or `THRESHOLD`
"""
return pulumi.get(self, "alert_type")
@alert_type.setter
def alert_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "alert_type", value)
@property
@pulumi.getter(name="canModifies")
def can_modifies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of users or groups that can modify this resource.
"""
return pulumi.get(self, "can_modifies")
@can_modifies.setter
def can_modifies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "can_modifies", value)
@property
@pulumi.getter(name="canViews")
def can_views(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of users or groups that can view this resource.
"""
return pulumi.get(self, "can_views")
@can_views.setter
def can_views(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "can_views", value)
@property
@pulumi.getter
def condition(self) -> Optional[pulumi.Input[str]]:
"""
A Wavefront query that is evaluated at regular intervals (default 1m).
The alert fires and notifications are triggered when data series matching this query evaluates
to a non-zero value for a set number of consecutive minutes.
"""
return pulumi.get(self, "condition")
@condition.setter
def condition(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "condition", value)
@property
@pulumi.getter
def conditions(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
a string->string map of `severity` to `condition`
for which this alert will trigger.
"""
return pulumi.get(self, "conditions")
@conditions.setter
def conditions(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "conditions", value)
@property
@pulumi.getter(name="displayExpression")
def display_expression(self) -> Optional[pulumi.Input[str]]:
"""
A second query whose results are displayed in the alert user
interface instead of the condition query. This field is often used to display a version
of the condition query with Boolean operators removed so that numerical values are plotted.
"""
return pulumi.get(self, "display_expression")
@display_expression.setter
def display_expression(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "display_expression", value)
@property
@pulumi.getter
def minutes(self) -> Optional[pulumi.Input[int]]:
"""
The number of consecutive minutes that a series matching the condition query must
evaluate to "true" (non-zero value) before the alert fires.
"""
return pulumi.get(self, "minutes")
@minutes.setter
def minutes(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "minutes", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the alert as it is displayed in Wavefront.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="notificationResendFrequencyMinutes")
def notification_resend_frequency_minutes(self) -> Optional[pulumi.Input[int]]:
"""
How often to re-trigger a continually failing alert.
If absent or <= 0, no re-triggering occurs.
"""
return pulumi.get(self, "notification_resend_frequency_minutes")
@notification_resend_frequency_minutes.setter
def notification_resend_frequency_minutes(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "notification_resend_frequency_minutes", value)
@property
@pulumi.getter(name="resolveAfterMinutes")
def resolve_after_minutes(self) -> Optional[pulumi.Input[int]]:
"""
The number of consecutive minutes that a firing series matching the condition
query must evaluate to "false" (zero value) before the alert resolves. When unset, this defaults to
the same value as `minutes`.
"""
return pulumi.get(self, "resolve_after_minutes")
@resolve_after_minutes.setter
def resolve_after_minutes(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "resolve_after_minutes", value)
@property
@pulumi.getter
def severity(self) -> Optional[pulumi.Input[str]]:
"""
- Severity of the alert, valid values are `INFO`, `SMOKE`, `WARN`, `SEVERE`.
"""
return pulumi.get(self, "severity")
@severity.setter
def severity(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "severity", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A set of tags to assign to this resource.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter
def target(self) -> Optional[pulumi.Input[str]]:
"""
A comma-separated list of the email address or integration endpoint
(such as PagerDuty or web hook) to notify when the alert status changes.
"""
return pulumi.get(self, "target")
@target.setter
def target(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "target", value)
@property
@pulumi.getter(name="thresholdTargets")
def threshold_targets(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Targets for severity
"""
return pulumi.get(self, "threshold_targets")
@threshold_targets.setter
def threshold_targets(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "threshold_targets", value)
class Alert(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
additional_information: Optional[pulumi.Input[str]] = None,
alert_type: Optional[pulumi.Input[str]] = None,
can_modifies: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
can_views: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
condition: Optional[pulumi.Input[str]] = None,
conditions: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
display_expression: Optional[pulumi.Input[str]] = None,
minutes: Optional[pulumi.Input[int]] = None,
name: Optional[pulumi.Input[str]] = None,
notification_resend_frequency_minutes: Optional[pulumi.Input[int]] = None,
resolve_after_minutes: Optional[pulumi.Input[int]] = None,
severity: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
target: Optional[pulumi.Input[str]] = None,
threshold_targets: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
"""
Provides a Wavefront Alert resource. This allows alerts to be created, updated, and deleted.
## Example Usage
```python
import pulumi
import pulumi_wavefront as wavefront
foobar = wavefront.Alert("foobar",
condition="100-ts(\"cpu.usage_idle\", environment=preprod and cpu=cpu-total ) > 80",
display_expression="100-ts(\"cpu.usage_idle\", environment=preprod and cpu=cpu-total )",
minutes=5,
resolve_after_minutes=5,
severity="WARN",
tags=[
"terraform",
"test",
],
target="test@example.com")
```
## Import
Alerts can be imported using the `id`, e.g.
```sh
$ pulumi import wavefront:index/alert:Alert alert_target 1479868728473
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] additional_information: User-supplied additional explanatory information for this alert.
Useful for linking runbooks, migrations, etc.
:param pulumi.Input[str] alert_type: The type of alert in Wavefront. Either `CLASSIC` (default)
or `THRESHOLD`
:param pulumi.Input[Sequence[pulumi.Input[str]]] can_modifies: A list of users or groups that can modify this resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] can_views: A list of users or groups that can view this resource.
:param pulumi.Input[str] condition: A Wavefront query that is evaluated at regular intervals (default 1m).
The alert fires and notifications are triggered when data series matching this query evaluates
to a non-zero value for a set number of consecutive minutes.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] conditions: a string->string map of `severity` to `condition`
for which this alert will trigger.
:param pulumi.Input[str] display_expression: A second query whose results are displayed in the alert user
interface instead of the condition query. This field is often used to display a version
of the condition query with Boolean operators removed so that numerical values are plotted.
:param pulumi.Input[int] minutes: The number of consecutive minutes that a series matching the condition query must
evaluate to "true" (non-zero value) before the alert fires.
:param pulumi.Input[str] name: The name of the alert as it is displayed in Wavefront.
:param pulumi.Input[int] notification_resend_frequency_minutes: How often to re-trigger a continually failing alert.
If absent or <= 0, no re-triggering occurs.
:param pulumi.Input[int] resolve_after_minutes: The number of consecutive minutes that a firing series matching the condition
query must evaluate to "false" (zero value) before the alert resolves. When unset, this defaults to
the same value as `minutes`.
:param pulumi.Input[str] severity: - Severity of the alert, valid values are `INFO`, `SMOKE`, `WARN`, `SEVERE`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: A set of tags to assign to this resource.
:param pulumi.Input[str] target: A comma-separated list of the email address or integration endpoint
(such as PagerDuty or web hook) to notify when the alert status changes.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] threshold_targets: Targets for severity
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: AlertArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Provides a Wavefront Alert resource. This allows alerts to be created, updated, and deleted.
## Example Usage
```python
import pulumi
import pulumi_wavefront as wavefront
foobar = wavefront.Alert("foobar",
condition="100-ts(\"cpu.usage_idle\", environment=preprod and cpu=cpu-total ) > 80",
display_expression="100-ts(\"cpu.usage_idle\", environment=preprod and cpu=cpu-total )",
minutes=5,
resolve_after_minutes=5,
severity="WARN",
tags=[
"terraform",
"test",
],
target="test@example.com")
```
## Import
Alerts can be imported using the `id`, e.g.
```sh
$ pulumi import wavefront:index/alert:Alert alert_target 1479868728473
```
:param str resource_name: The name of the resource.
:param AlertArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(AlertArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
additional_information: Optional[pulumi.Input[str]] = None,
alert_type: Optional[pulumi.Input[str]] = None,
can_modifies: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
can_views: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
condition: Optional[pulumi.Input[str]] = None,
conditions: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
display_expression: Optional[pulumi.Input[str]] = None,
minutes: Optional[pulumi.Input[int]] = None,
name: Optional[pulumi.Input[str]] = None,
notification_resend_frequency_minutes: Optional[pulumi.Input[int]] = None,
resolve_after_minutes: Optional[pulumi.Input[int]] = None,
severity: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
target: Optional[pulumi.Input[str]] = None,
threshold_targets: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = AlertArgs.__new__(AlertArgs)
__props__.__dict__["additional_information"] = additional_information
__props__.__dict__["alert_type"] = alert_type
__props__.__dict__["can_modifies"] = can_modifies
__props__.__dict__["can_views"] = can_views
__props__.__dict__["condition"] = condition
__props__.__dict__["conditions"] = conditions
__props__.__dict__["display_expression"] = display_expression
if minutes is None and not opts.urn:
raise TypeError("Missing required property 'minutes'")
__props__.__dict__["minutes"] = minutes
__props__.__dict__["name"] = name
__props__.__dict__["notification_resend_frequency_minutes"] = notification_resend_frequency_minutes
__props__.__dict__["resolve_after_minutes"] = resolve_after_minutes
__props__.__dict__["severity"] = severity
if tags is None and not opts.urn:
raise TypeError("Missing required property 'tags'")
__props__.__dict__["tags"] = tags
__props__.__dict__["target"] = target
__props__.__dict__["threshold_targets"] = threshold_targets
super(Alert, __self__).__init__(
'wavefront:index/alert:Alert',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
additional_information: Optional[pulumi.Input[str]] = None,
alert_type: Optional[pulumi.Input[str]] = None,
can_modifies: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
can_views: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
condition: Optional[pulumi.Input[str]] = None,
conditions: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
display_expression: Optional[pulumi.Input[str]] = None,
minutes: Optional[pulumi.Input[int]] = None,
name: Optional[pulumi.Input[str]] = None,
notification_resend_frequency_minutes: Optional[pulumi.Input[int]] = None,
resolve_after_minutes: Optional[pulumi.Input[int]] = None,
severity: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
target: Optional[pulumi.Input[str]] = None,
threshold_targets: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None) -> 'Alert':
"""
Get an existing Alert resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] additional_information: User-supplied additional explanatory information for this alert.
Useful for linking runbooks, migrations, etc.
:param pulumi.Input[str] alert_type: The type of alert in Wavefront. Either `CLASSIC` (default)
or `THRESHOLD`
:param pulumi.Input[Sequence[pulumi.Input[str]]] can_modifies: A list of users or groups that can modify this resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] can_views: A list of users or groups that can view this resource.
:param pulumi.Input[str] condition: A Wavefront query that is evaluated at regular intervals (default 1m).
The alert fires and notifications are triggered when data series matching this query evaluates
to a non-zero value for a set number of consecutive minutes.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] conditions: a string->string map of `severity` to `condition`
for which this alert will trigger.
:param pulumi.Input[str] display_expression: A second query whose results are displayed in the alert user
interface instead of the condition query. This field is often used to display a version
of the condition query with Boolean operators removed so that numerical values are plotted.
:param pulumi.Input[int] minutes: The number of consecutive minutes that a series matching the condition query must
evaluate to "true" (non-zero value) before the alert fires.
:param pulumi.Input[str] name: The name of the alert as it is displayed in Wavefront.
:param pulumi.Input[int] notification_resend_frequency_minutes: How often to re-trigger a continually failing alert.
If absent or <= 0, no re-triggering occurs.
:param pulumi.Input[int] resolve_after_minutes: The number of consecutive minutes that a firing series matching the condition
query must evaluate to "false" (zero value) before the alert resolves. When unset, this defaults to
the same value as `minutes`.
:param pulumi.Input[str] severity: - Severity of the alert, valid values are `INFO`, `SMOKE`, `WARN`, `SEVERE`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: A set of tags to assign to this resource.
:param pulumi.Input[str] target: A comma-separated list of the email address or integration endpoint
(such as PagerDuty or web hook) to notify when the alert status changes.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] threshold_targets: Targets for severity
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _AlertState.__new__(_AlertState)
__props__.__dict__["additional_information"] = additional_information
__props__.__dict__["alert_type"] = alert_type
__props__.__dict__["can_modifies"] = can_modifies
__props__.__dict__["can_views"] = can_views
__props__.__dict__["condition"] = condition
__props__.__dict__["conditions"] = conditions
__props__.__dict__["display_expression"] = display_expression
__props__.__dict__["minutes"] = minutes
__props__.__dict__["name"] = name
__props__.__dict__["notification_resend_frequency_minutes"] = notification_resend_frequency_minutes
__props__.__dict__["resolve_after_minutes"] = resolve_after_minutes
__props__.__dict__["severity"] = severity
__props__.__dict__["tags"] = tags
__props__.__dict__["target"] = target
__props__.__dict__["threshold_targets"] = threshold_targets
return Alert(resource_name, opts=opts, __props__=__props__)
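# A minimal sketch of adopting an existing alert; the ID below is the illustrative value
# from the import example in the class docstring, not a real resource:
# existing = wavefront.Alert.get("alert_target", id="1479868728473")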
@property
@pulumi.getter(name="additionalInformation")
def additional_information(self) -> pulumi.Output[Optional[str]]:
"""
User-supplied additional explanatory information for this alert.
Useful for linking runbooks, migrations, etc.
"""
return pulumi.get(self, "additional_information")
@property
@pulumi.getter(name="alertType")
def alert_type(self) -> pulumi.Output[Optional[str]]:
"""
The type of alert in Wavefront. Either `CLASSIC` (default)
or `THRESHOLD`
"""
return pulumi.get(self, "alert_type")
@property
@pulumi.getter(name="canModifies")
def can_modifies(self) -> pulumi.Output[Sequence[str]]:
"""
A list of users or groups that can modify this resource.
"""
return pulumi.get(self, "can_modifies")
@property
@pulumi.getter(name="canViews")
def can_views(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
A list of users or groups that can view this resource.
"""
return pulumi.get(self, "can_views")
@property
@pulumi.getter
def condition(self) -> pulumi.Output[Optional[str]]:
"""
A Wavefront query that is evaluated at regular intervals (default 1m).
The alert fires and notifications are triggered when data series matching this query evaluates
to a non-zero value for a set number of consecutive minutes.
"""
return pulumi.get(self, "condition")
@property
@pulumi.getter
def conditions(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
a string->string map of `severity` to `condition`
for which this alert will trigger.
"""
return pulumi.get(self, "conditions")
@property
@pulumi.getter(name="displayExpression")
def display_expression(self) -> pulumi.Output[Optional[str]]:
"""
A second query whose results are displayed in the alert user
interface instead of the condition query. This field is often used to display a version
of the condition query with Boolean operators removed so that numerical values are plotted.
"""
return pulumi.get(self, "display_expression")
@property
@pulumi.getter
def minutes(self) -> pulumi.Output[int]:
"""
The number of consecutive minutes that a series matching the condition query must
evaluate to "true" (non-zero value) before the alert fires.
"""
return pulumi.get(self, "minutes")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the alert as it is displayed in Wavefront.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="notificationResendFrequencyMinutes")
def notification_resend_frequency_minutes(self) -> pulumi.Output[Optional[int]]:
"""
How often to re-trigger a continually failing alert.
If absent or <= 0, no re-triggering occurs.
"""
return pulumi.get(self, "notification_resend_frequency_minutes")
@property
@pulumi.getter(name="resolveAfterMinutes")
def resolve_after_minutes(self) -> pulumi.Output[Optional[int]]:
"""
The number of consecutive minutes that a firing series matching the condition
query must evaluate to "false" (zero value) before the alert resolves. When unset, this defaults to
the same value as `minutes`.
"""
return pulumi.get(self, "resolve_after_minutes")
@property
@pulumi.getter
def severity(self) -> pulumi.Output[str]:
"""
- Severity of the alert, valid values are `INFO`, `SMOKE`, `WARN`, `SEVERE`.
"""
return pulumi.get(self, "severity")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Sequence[str]]:
"""
A set of tags to assign to this resource.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def target(self) -> pulumi.Output[Optional[str]]:
"""
A comma-separated list of the email address or integration endpoint
(such as PagerDuty or web hook) to notify when the alert status changes.
"""
return pulumi.get(self, "target")
@property
@pulumi.getter(name="thresholdTargets")
def threshold_targets(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
Targets for severity
"""
return pulumi.get(self, "threshold_targets")
| 47.634249
| 134
| 0.651724
| 5,372
| 45,062
| 5.306776
| 0.050819
| 0.101866
| 0.077592
| 0.048618
| 0.946226
| 0.937982
| 0.924723
| 0.914761
| 0.912972
| 0.900274
| 0
| 0.001803
| 0.249123
| 45,062
| 945
| 135
| 47.684656
| 0.840732
| 0.377924
| 0
| 0.856275
| 1
| 0
| 0.095654
| 0.036033
| 0
| 0
| 0
| 0
| 0
| 1
| 0.165992
| false
| 0.002024
| 0.010121
| 0
| 0.275304
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
34b53af51d7c951adf22c52537a1d910eeef31f7
| 5,802
|
py
|
Python
|
QC/models.py
|
phcavelar/graph-odenet
|
cba1224c041e53ea221e31bf9103ef950b8bd460
|
[
"MIT"
] | 4
|
2019-12-10T18:49:03.000Z
|
2022-02-16T03:21:30.000Z
|
QC/models.py
|
phcavelar/graph-odenet
|
cba1224c041e53ea221e31bf9103ef950b8bd460
|
[
"MIT"
] | 1
|
2020-11-04T04:41:09.000Z
|
2021-01-07T18:52:37.000Z
|
QC/models.py
|
phcavelar/graph-odenet
|
cba1224c041e53ea221e31bf9103ef950b8bd460
|
[
"MIT"
] | 2
|
2020-04-03T12:05:33.000Z
|
2020-10-10T11:57:48.000Z
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from torchdiffeq import odeint_adjoint as odeint
from mpnn import MPNN_enn_edge as MPNN_enn
from set2set import Set2Set
from layers import EdgeEncoderMLP, EdgeGraphConvolution
from torch_scatter import scatter_add
class MPNN_ENN_Sum(nn.Module):
def __init__(self, node_features, edge_features, hidden_features, out_features, processing_steps=12, type="regression", **kwargs):
super(MPNN_ENN_Sum, self).__init__()
self.input = nn.Linear(in_features=node_features,out_features=hidden_features)
self.ee = EdgeEncoderMLP( edge_features, hidden_features )
self.mpnn = MPNN_enn(edge_features, hidden_features)
self.output = nn.Linear(in_features=hidden_features,out_features=out_features)
self.type = type
def forward(self,
node_features, # N x fn :: Float
edge_features, # E x fe :: Float
Esrc, # E :: Long
Etgt, # N x E :: Float
batch, # B x N :: Float
):
batch_size = batch.max().item() + 1
x = node_features
edge_data = self.ee(edge_features)
x = self.input(x)
x = self.mpnn(x,Esrc,Etgt,edge_data)
x = self.output(x)
x = scatter_add(x, batch, dim=0, dim_size=batch_size)
if self.type=="classification":
x = F.log_softmax(x,dim=1)
return x
class MPNN_ENN_Set2Set(nn.Module):
def __init__(self, node_features, edge_features, hidden_features, out_features, processing_steps=12, type="regression", **kwargs):
super(MPNN_ENN_Set2Set, self).__init__()
self.input = nn.Linear(in_features=node_features,out_features=hidden_features)
self.ee = EdgeEncoderMLP( edge_features, hidden_features )
self.mpnn = MPNN_enn(edge_features, hidden_features)
self.s2s = Set2Set(hidden_features, processing_steps, num_layers=1)
self.output = nn.Linear(in_features=hidden_features,out_features=out_features)
self.type = type
def forward(self,
node_features, # N x fn :: Float
edge_features, # E x fe :: Float
Esrc, # E :: Long
Etgt, # N x E :: Float
batch, # B x N :: Float
):
batch_size = batch.max().item() + 1
x = node_features
edge_data = self.ee(edge_features)
x = self.input(x)
x = self.mpnn(x,Esrc,Etgt,edge_data)
x = self.s2s(x,batch)[:,:x.size()[1]]
x = self.output(x)
if self.type=="classification":
x = F.log_softmax(x,dim=1)
return x
class EdgeGCN3_Sum(nn.Module):
def __init__( self, node_features, edge_features, hidden_features, out_features, dropout=0, type="regression" ):
super(EdgeGCN3_Sum, self).__init__()
self.gc1 = EdgeGraphConvolution( node_features, hidden_features )
self.gc2 = EdgeGraphConvolution( hidden_features, hidden_features )
self.gc3 = EdgeGraphConvolution( hidden_features, out_features )
self.dropout = dropout
self.type = type # forward() below reads self.type, so store the prediction type here
self.ee1 = EdgeEncoderMLP( edge_features, hidden_features )
self.ee2 = EdgeEncoderMLP( edge_features, hidden_features )
self.ee3 = EdgeEncoderMLP( edge_features, out_features )
#end __init__
def forward(self,
node_features, # N x fn :: Float
edge_features, # E x fe :: Float
Esrc, # E :: Long
Etgt, # N x E :: Float
batch, # B x N :: Float
):
batch_size = batch.max().item() + 1
x = node_features
ef1 = self.ee1(edge_features)
x = F.relu(self.gc1(x, Esrc, Etgt, ef1))
x = F.dropout(x, self.dropout, training=self.training)
ef2 = self.ee2(edge_features)
x = F.relu(self.gc2(x, Esrc, Etgt, ef2))
x = F.dropout(x, self.dropout, training=self.training)
ef3 = self.ee3(edge_features)
x = self.gc3(x, Esrc, Etgt, ef3)
x = scatter_add(x, batch, dim=0, dim_size=batch_size)
return F.log_softmax(x, dim=1) if self.type == "classification" else x
class EdgeGCN3_Set2Set(nn.Module):
def __init__( self, node_features, edge_features, hidden_features, out_features, dropout=0, processing_steps=8, type="regression" ):
super(EdgeGCN3_Set2Set, self).__init__()
self.gc1 = EdgeGraphConvolution( node_features, hidden_features )
self.gc2 = EdgeGraphConvolution( hidden_features, hidden_features )
self.gc3 = EdgeGraphConvolution( hidden_features, out_features )
self.dropout = dropout
self.type = type # forward() below reads self.type, so store the prediction type here
self.ee1 = EdgeEncoderMLP( edge_features, hidden_features )
self.ee2 = EdgeEncoderMLP( edge_features, hidden_features )
self.ee3 = EdgeEncoderMLP( edge_features, out_features )
self.s2s = Set2Set(out_features, processing_steps, num_layers=1)
#end __init__
def forward(self,
node_features, # N x fn :: Float
edge_features, # E x fe :: Float
Esrc, # E :: Long
Etgt, # N x E :: Float
batch, # B x N :: Float
):
x = node_features
ef1 = self.ee1(edge_features)
x = F.relu(self.gc1(x, Esrc, Etgt, ef1))
x = F.dropout(x, self.dropout, training=self.training)
ef2 = self.ee2(edge_features)
x = F.relu(self.gc2(x, Esrc, Etgt, ef2))
x = F.dropout(x, self.dropout, training=self.training)
ef3 = self.ee3(edge_features)
x = self.gc3(x, Esrc, Etgt, ef3)
x = self.s2s(x, batch)[:,:x.size()[1]]
return F.log_softmax(x, dim=1) if self.type == "classification" else x
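# A minimal construction sketch (hypothetical sizes); all four models above share the same
# forward(node_features, edge_features, Esrc, Etgt, batch) signature:
# model = MPNN_ENN_Sum(node_features=16, edge_features=4, hidden_features=64, out_features=1)
# out = model(node_feats, edge_feats, Esrc, Etgt, batch)  # out: (num_graphs, out_features)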
| 41.148936
| 136
| 0.614099
| 745
| 5,802
| 4.554362
| 0.111409
| 0.091954
| 0.129679
| 0.10728
| 0.876805
| 0.876805
| 0.857353
| 0.857353
| 0.847333
| 0.847333
| 0
| 0.01727
| 0.281455
| 5,802
| 140
| 137
| 41.442857
| 0.796594
| 0.053602
| 0
| 0.82906
| 0
| 0
| 0.017557
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.068376
| false
| 0
| 0.068376
| 0
| 0.205128
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
34badf3548ee29a23516c04b397b47d0dc060611
| 17,230
|
py
|
Python
|
whisperlib/common/base/callback/print_callback.py
|
cpopescu/whispercast
|
dd4ee1d4fa2e3436fc2387240eb3f5622749d944
|
[
"BSD-3-Clause"
] | null | null | null |
whisperlib/common/base/callback/print_callback.py
|
cpopescu/whispercast
|
dd4ee1d4fa2e3436fc2387240eb3f5622749d944
|
[
"BSD-3-Clause"
] | null | null | null |
whisperlib/common/base/callback/print_callback.py
|
cpopescu/whispercast
|
dd4ee1d4fa2e3436fc2387240eb3f5622749d944
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/python
#
# Copyright (c) 2009, Whispersoft s.r.l.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Whispersoft s.r.l. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Catalin Popescu
#
# This handy script generates functor classes (see callback.h, callback1.h
# for examples..)
#
######################################################################
def ClosureN(N):
typenames = ', '.join(['typename T%d' % i for i in range(N)])
params = ', '.join(['T%d p%d' % (i, i) for i in range(N)])
typeargs = ', '.join(['T%d' % i for i in range(N)])
paramnames = ', '.join(['p%d' % (i) for i in range(N)])
print """
template<%s>
class Closure%d : public Closure {
public:
typedef void (*Fun)(%s);
Closure%d(bool is_permanent, Fun fun, %s)
: Closure(is_permanent),
%s
fun_(fun) {
}
protected:
virtual void RunInternal() {
(*fun_)(%s);
}
private:
%s
Fun fun_;
};
template<%s>
Closure%d<%s>* NewCallback(void (*fun)(%s), %s) {
return new Closure%d<%s>(false, fun, %s);
}
template<%s>
Closure%d<%s>* NewPermanentCallback(void (*fun)(%s), %s) {
return new Closure%d<%s>(true, fun, %s);
}
""" % (typenames,
N,
typeargs,
N,
params,
'\n'.join(['p%d_(p%d),' % (i, i) for i in range(N)]),
', '.join(['p%d_' % (i) for i in range(N)]),
'\n'.join(['T%d p%d_;' % (i, i) for i in range(N)]),
typenames,
N, typeargs, typeargs,
params, N, typeargs, paramnames,
typenames,
N, typeargs, typeargs,
params, N, typeargs, paramnames)
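# For reference, ClosureN(1) emits roughly the following C++ (whitespace compacted):
#   template<typename T0>
#   class Closure1 : public Closure {
#    public:
#     typedef void (*Fun)(T0);
#     Closure1(bool is_permanent, Fun fun, T0 p0)
#       : Closure(is_permanent), p0_(p0), fun_(fun) {}
#    protected:
#     virtual void RunInternal() { (*fun_)(p0_); }
#    private:
#     T0 p0_;
#     Fun fun_;
#   };
#   template<typename T0>
#   Closure1<T0>* NewCallback(void (*fun)(T0), T0 p0) {
#     return new Closure1<T0>(false, fun, p0);
#   }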
######################################################################
def ConstMemberClosureN(N):
typenames = ', '.join(['typename T%d' % i for i in range(N)])
params = ', '.join(['T%d p%d' % (i, i) for i in range(N)])
typeargs = ', '.join(['T%d' % i for i in range(N)])
paramnames = ', '.join(['p%d' % (i) for i in range(N)])
print """
template<typename C, %s>
class ConstMemberClosure%d : public Closure {
public:
typedef void (C::*Fun)(%s) const;
ConstMemberClosure%d(bool is_permanent, const C* c, Fun fun, %s)
: Closure(is_permanent),
c_(c),
%s
fun_(fun) {
}
protected:
virtual void RunInternal() {
(c_->*fun_)(%s);
}
private:
const C* c_;
%s
Fun fun_;
};
template<typename C, %s>
ConstMemberClosure%d<C, %s>* NewCallback(const C* c, void (C::*fun)(%s) const, %s) {
return new ConstMemberClosure%d<C, %s>(false, c, fun, %s);
}
template<typename C, %s>
ConstMemberClosure%d<C, %s>* NewPermanentCallback(C* c, void (C::*fun)(%s) const, %s) {
return new ConstMemberClosure%d<C, %s>(true, c, fun, %s);
}
""" % (typenames,
N,
typeargs,
N,
params,
'\n'.join(['p%d_(p%d),' % (i, i) for i in range(N)]),
', '.join(['p%d_' % (i) for i in range(N)]),
'\n'.join(['T%d p%d_;' % (i, i) for i in range(N)]),
typenames,
N, typeargs, typeargs,
params, N, typeargs, paramnames,
typenames,
N, typeargs, typeargs,
params, N, typeargs, paramnames)
######################################################################
def MemberClosureN(N):
typenames = ', '.join(['typename T%d' % i for i in range(N)])
params = ', '.join(['T%d p%d' % (i, i) for i in range(N)])
typeargs = ', '.join(['T%d' % i for i in range(N)])
paramnames = ', '.join(['p%d' % (i) for i in range(N)])
print """
template<typename C, %s>
class MemberClosure%d : public Closure {
public:
typedef void (C::*Fun)(%s);
MemberClosure%d(bool is_permanent, C* c, Fun fun, %s)
: Closure(is_permanent),
c_(c),
%s
fun_(fun) {
}
protected:
virtual void RunInternal() {
(c_->*fun_)(%s);
}
private:
C* c_;
%s
Fun fun_;
};
template<typename C, %s>
MemberClosure%d<C, %s>* NewCallback(C* c, void (C::*fun)(%s), %s) {
return new MemberClosure%d<C, %s>(false, c, fun, %s);
}
template<typename C, %s>
MemberClosure%d<C, %s>* NewPermanentCallback(C* c, void (C::*fun)(%s), %s) {
return new MemberClosure%d<C, %s>(true, c, fun, %s);
}
""" % (typenames,
N,
typeargs,
N,
params,
'\n'.join(['p%d_(p%d),' % (i, i) for i in range(N)]),
', '.join(['p%d_' % (i) for i in range(N)]),
'\n'.join(['T%d p%d_;' % (i, i) for i in range(N)]),
typenames,
N, typeargs, typeargs,
params, N, typeargs, paramnames,
typenames,
N, typeargs, typeargs,
params, N, typeargs, paramnames)
######################################################################
## Callback X
######################################################################
def CallbackX(M, N):
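  # Emits CallbackM_N (e.g. Callback1_2): the N T-arguments are pre-bound at
  # construction time, the M X-arguments are supplied when the callback runs,
  # and the wrapped free function receives them as (T..., X...).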
typenames = ', '.join(['typename T%d' % i for i in range(N)])
typenamesX = ', '.join(['typename X%d' % i for i in range(M)])
params = ', '.join(['T%d p%d' % (i, i) for i in range(N)])
paramsX = ', '.join(['X%d x%d' % (i, i) for i in range(M)])
typeargs = ', '.join(['T%d' % i for i in range(N)])
typeargsX = ', '.join(['X%d' % i for i in range(M)])
paramnames = ', '.join(['p%d' % (i) for i in range(N)])
print """
template<%s, %s>
class Callback%d_%d : public Callback%d<%s> {
public:
typedef void (*Fun)(%s, %s);
Callback%d_%d(bool is_permanent, Fun fun, %s)
: Callback%d<%s>(is_permanent),
%s
fun_(fun) {
}
protected:
virtual void RunInternal(%s) {
(*fun_)(%s, %s);
}
private:
%s
Fun fun_;
};
template<%s, %s>
Callback%d_%d<%s, %s>* NewCallback(void (*fun)(%s, %s), %s) {
return new Callback%d_%d<%s, %s>(false, fun, %s);
}
template<%s, %s>
Callback%d_%d<%s, %s>* NewPermanentCallback(void (*fun)(%s, %s), %s) {
return new Callback%d_%d<%s, %s>(true, fun, %s);
}
""" % (typenames, typenamesX,
M, N,
M, typeargsX,
typeargs, typeargsX,
M, N, params,
M, typeargsX,
'\n'.join(['p%d_(p%d),' % (i, i) for i in range(N)]),
paramsX,
', '.join(['p%d_' % (i) for i in range(N)]),
', '.join(['x%d' % (i) for i in range(M)]),
'\n'.join(['T%d p%d_;' % (i, i) for i in range(N)]),
typenames, typenamesX,
M, N, typeargs, typeargsX, typeargs, typeargsX,
params, M, N, typeargs, typeargsX, paramnames,
typenames, typenamesX,
M, N, typeargs, typeargsX, typeargs, typeargsX,
params, M, N, typeargs, typeargsX, paramnames)
######################################################################
def ConstMemberCallbackX(M, N):
typenames = ', '.join(['typename T%d' % i for i in range(N)])
typenamesX = ', '.join(['typename X%d' % i for i in range(M)])
params = ', '.join(['T%d p%d' % (i, i) for i in range(N)])
paramsX = ', '.join(['X%d x%d' % (i, i) for i in range(M)])
typeargs = ', '.join(['T%d' % i for i in range(N)])
typeargsX = ', '.join(['X%d' % i for i in range(M)])
paramnames = ', '.join(['p%d' % (i) for i in range(N)])
print """
template<typename C, %s, %s>
class ConstMemberCallback%d_%d : public Callback%d<%s> {
public:
typedef void (C::*Fun)(%s, %s) const;
ConstMemberCallback%d_%d (bool is_permanent, const C* c, Fun fun, %s)
: Callback%d<%s>(is_permanent),
c_(c),
%s
fun_(fun) {
}
protected:
virtual void RunInternal(%s) {
(c_->*fun_)(%s, %s);
}
private:
const C* c_;
%s
Fun fun_;
}; """ % ( typenames, typenamesX,
M, N,
M, typeargsX,
typeargs, typeargsX,
M, N, params,
M, typeargsX,
'\n'.join(['p%d_(p%d),' % (i, i) for i in range(N)]),
paramsX,
', '.join(['p%d_' % (i) for i in range(N)]),
', '.join(['x%d' % (i) for i in range(M)]),
'\n'.join(['T%d p%d_;' % (i, i) for i in range(N)])
)
print """
template<typename C, %s, %s>
ConstMemberCallback%d_%d<C, %s, %s>* NewCallback(const C* c, void (C::*fun)(%s, %s) const, %s) {
return new ConstMemberCallback%d_%d<C, %s, %s>(false, c, fun, %s);
}
template<typename C, %s, %s>
ConstMemberCallback%d_%d<C, %s, %s>* NewPermanentCallback(const C* c, void (C::*fun)(%s, %s) const, %s) {
return new ConstMemberCallback%d_%d<C, %s, %s>(true, c, fun, %s);
}
""" % (
typenames, typenamesX,
M, N, typeargs, typeargsX, typeargs, typeargsX,
params, M, N, typeargs, typeargsX, paramnames,
typenames, typenamesX,
M, N, typeargs, typeargsX, typeargs, typeargsX,
params, M, N, typeargs, typeargsX, paramnames)
######################################################################
def MemberCallbackX(M, N):
typenames = ', '.join(['typename T%d' % i for i in range(N)])
typenamesX = ', '.join(['typename X%d' % i for i in range(M)])
params = ', '.join(['T%d p%d' % (i, i) for i in range(N)])
paramsX = ', '.join(['X%d x%d' % (i, i) for i in range(M)])
typeargs = ', '.join(['T%d' % i for i in range(N)])
typeargsX = ', '.join(['X%d' % i for i in range(M)])
paramnames = ', '.join(['p%d' % (i) for i in range(N)])
print """
template<typename C, %s, %s>
class MemberCallback%d_%d : public Callback%d<%s> {
public:
typedef void (C::*Fun)(%s, %s);
MemberCallback%d_%d (bool is_permanent, C* c, Fun fun, %s)
: Callback%d<%s>(is_permanent),
c_(c),
%s
fun_(fun) {
}
protected:
virtual void RunInternal(%s) {
(c_->*fun_)(%s, %s);
}
private:
C* c_;
%s
Fun fun_;
}; """ % ( typenames, typenamesX,
M, N,
M, typeargsX,
typeargs, typeargsX,
M, N, params,
M, typeargsX,
'\n'.join(['p%d_(p%d),' % (i, i) for i in range(N)]),
paramsX,
', '.join(['p%d_' % (i) for i in range(N)]),
', '.join(['x%d' % (i) for i in range(M)]),
'\n'.join(['T%d p%d_;' % (i, i) for i in range(N)])
)
print """
template<typename C, %s, %s>
MemberCallback%d_%d<C, %s, %s>* NewCallback(C* c, void (C::*fun)(%s, %s), %s) {
return new MemberCallback%d_%d<C, %s, %s>(false, c, fun, %s);
}
template<typename C, %s, %s>
MemberCallback%d_%d<C, %s, %s>* NewPermanentCallback(C* c, void (C::*fun)(%s, %s), %s) {
return new MemberCallback%d_%d<C, %s, %s>(true, c, fun, %s);
}
""" % (
typenames, typenamesX,
M, N, typeargs, typeargsX, typeargs, typeargsX,
params, M, N, typeargs, typeargsX, paramnames,
typenames, typenamesX,
M, N, typeargs, typeargsX, typeargs, typeargsX,
params, M, N, typeargs, typeargsX, paramnames)
######################################################################
## ResultCallback X
######################################################################
def ResultCallbackX(M, N):
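  # Same layout as CallbackX above, but the wrapped function returns R and
  # RunInternal() forwards that return value to the caller.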
typenames = ', '.join(['typename T%d' % i for i in range(N)])
typenamesX = ', '.join(['typename X%d' % i for i in range(M)])
params = ', '.join(['T%d p%d' % (i, i) for i in range(N)])
paramsX = ', '.join(['X%d x%d' % (i, i) for i in range(M)])
typeargs = ', '.join(['T%d' % i for i in range(N)])
typeargsX = ', '.join(['X%d' % i for i in range(M)])
paramnames = ', '.join(['p%d' % (i) for i in range(N)])
print """
template<typename R, %s, %s>
class ResultCallback%d_%d : public ResultCallback%d<R, %s> {
public:
typedef R (*Fun)(%s, %s);
ResultCallback%d_%d(bool is_permanent, Fun fun, %s)
: ResultCallback%d<R, %s>(is_permanent),
%s
fun_(fun) {
}
protected:
virtual R RunInternal(%s) {
return (*fun_)(%s, %s);
}
private:
%s
Fun fun_;
};
template<typename R, %s, %s>
ResultCallback%d_%d<R, %s, %s>* NewCallback(R (*fun)(%s, %s), %s) {
return new ResultCallback%d_%d<R, %s, %s>(false, fun, %s);
}
template<typename R, %s, %s>
ResultCallback%d_%d<R, %s, %s>* NewPermanentCallback(R (*fun)(%s, %s), %s) {
return new ResultCallback%d_%d<R, %s, %s>(true, fun, %s);
}
""" % (typenames, typenamesX,
M, N,
M, typeargsX,
typeargs, typeargsX,
M, N, params,
M, typeargsX,
'\n'.join(['p%d_(p%d),' % (i, i) for i in range(N)]),
paramsX,
', '.join(['p%d_' % (i) for i in range(N)]),
', '.join(['x%d' % (i) for i in range(M)]),
'\n'.join(['T%d p%d_;' % (i, i) for i in range(N)]),
typenames, typenamesX,
M, N, typeargs, typeargsX, typeargs, typeargsX,
params, M, N, typeargs, typeargsX, paramnames,
typenames, typenamesX,
M, N, typeargs, typeargsX, typeargs, typeargsX,
params, M, N, typeargs, typeargsX, paramnames)
######################################################################
def ResultConstMemberCallbackX(M, N):
typenames = ', '.join(['typename T%d' % i for i in range(N)])
typenamesX = ', '.join(['typename X%d' % i for i in range(M)])
params = ', '.join(['T%d p%d' % (i, i) for i in range(N)])
paramsX = ', '.join(['X%d x%d' % (i, i) for i in range(M)])
typeargs = ', '.join(['T%d' % i for i in range(N)])
typeargsX = ', '.join(['X%d' % i for i in range(M)])
paramnames = ', '.join(['p%d' % (i) for i in range(N)])
print """
template<typename C, typename R, %s, %s>
class ResultConstMemberCallback%d_%d : public ResultCallback%d<R, %s> {
public:
typedef R (C::*Fun)(%s, %s) const;
ResultConstMemberCallback%d_%d (bool is_permanent, const C* c, Fun fun, %s)
: ResultCallback%d<R, %s>(is_permanent),
c_(c),
%s
fun_(fun) {
}
protected:
virtual R RunInternal(%s) {
return (c_->*fun_)(%s, %s);
}
private:
const C* c_;
%s
Fun fun_;
}; """ % ( typenames, typenamesX,
M, N,
M, typeargsX,
typeargs, typeargsX,
M, N, params,
M, typeargsX,
'\n'.join(['p%d_(p%d),' % (i, i) for i in range(N)]),
paramsX,
', '.join(['p%d_' % (i) for i in range(N)]),
', '.join(['x%d' % (i) for i in range(M)]),
'\n'.join(['T%d p%d_;' % (i, i) for i in range(N)])
)
print """
template<typename C, typename R, %s, %s>
ResultConstMemberCallback%d_%d<C, R, %s, %s>* NewCallback(const C* c, R (C::*fun)(%s, %s) const, %s) {
return new ResultConstMemberCallback%d_%d<C, R, %s, %s>(false, c, fun, %s);
}
template<typename C, typename R, %s, %s>
ResultConstMemberCallback%d_%d<C, R, %s, %s>* NewPermanentCallback(const C* c, R (C::*fun)(%s, %s) const, %s) {
return new ResultConstMemberCallback%d_%d<C, R, %s, %s>(true, c, fun, %s);
}
""" % (
typenames, typenamesX,
M, N, typeargs, typeargsX, typeargs, typeargsX,
params, M, N, typeargs, typeargsX, paramnames,
typenames, typenamesX,
M, N, typeargs, typeargsX, typeargs, typeargsX,
params, M, N, typeargs, typeargsX, paramnames)
######################################################################
def ResultMemberCallbackX(M, N):
typenames = ', '.join(['typename T%d' % i for i in range(N)])
typenamesX = ', '.join(['typename X%d' % i for i in range(M)])
params = ', '.join(['T%d p%d' % (i, i) for i in range(N)])
paramsX = ', '.join(['X%d x%d' % (i, i) for i in range(M)])
typeargs = ', '.join(['T%d' % i for i in range(N)])
typeargsX = ', '.join(['X%d' % i for i in range(M)])
paramnames = ', '.join(['p%d' % (i) for i in range(N)])
print """
template<typename C, typename R, %s, %s>
class ResultMemberCallback%d_%d : public ResultCallback%d<R, %s> {
public:
typedef R (C::*Fun)(%s, %s);
ResultMemberCallback%d_%d (bool is_permanent, C* c, Fun fun, %s)
: ResultCallback%d<R, %s>(is_permanent),
c_(c),
%s
fun_(fun) {
}
protected:
virtual R RunInternal(%s) {
return (c_->*fun_)(%s, %s);
}
private:
C* c_;
%s
Fun fun_;
}; """ % ( typenames, typenamesX,
M, N,
M, typeargsX,
typeargs, typeargsX,
M, N, params,
M, typeargsX,
'\n'.join(['p%d_(p%d),' % (i, i) for i in range(N)]),
paramsX,
', '.join(['p%d_' % (i) for i in range(N)]),
', '.join(['x%d' % (i) for i in range(M)]),
'\n'.join(['T%d p%d_;' % (i, i) for i in range(N)])
)
print """
template<typename C, typename R, %s, %s>
ResultMemberCallback%d_%d<C, R, %s, %s>* NewCallback(C* c, R (C::*fun)(%s, %s), %s) {
return new ResultMemberCallback%d_%d<C, R, %s, %s>(false, c, fun, %s);
}
template<typename C, typename R, %s, %s>
ResultMemberCallback%d_%d<C, R, %s, %s>* NewPermanentCallback(C* c, R (C::*fun)(%s, %s), %s) {
return new ResultMemberCallback%d_%d<C, R, %s, %s>(true, c, fun, %s);
}
""" % (
typenames, typenamesX,
M, N, typeargs, typeargsX, typeargs, typeargsX,
params, M, N, typeargs, typeargsX, paramnames,
typenames, typenamesX,
M, N, typeargs, typeargsX, typeargs, typeargsX,
params, M, N, typeargs, typeargsX, paramnames)
for i in range(1, 10):
ClosureN(i)
MemberClosureN(i)
ConstMemberClosureN(i)
#for i in range(1, 10):
# CallbackX(1,i)
# ConstMemberCallbackX(1, i)
# MemberCallbackX(1, i)
#for i in range(1, 10):
# ResultCallbackX(1,i)
# ResultConstMemberCallbackX(1, i)
# ResultMemberCallbackX(1, i)
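######################################################################
# Usage sketch (assumptions: the generators above live in a file importable as
# gen_callbacks.py and run under Python 2, since they rely on print statements;
# the emitted C++ also expects a hand-written Closure/Callback base-class
# header such as callback.h). The simplest route is shell redirection,
#   python gen_callbacks.py > callbacks-generated.h
# and the helper below does the same from Python by rerouting stdout.
import sys

def write_generated_header(path):
    # Route the generators' print output into `path` instead of the console.
    original_stdout = sys.stdout
    with open(path, 'w') as out:
        sys.stdout = out
        try:
            for n in range(1, 10):
                ClosureN(n)
                MemberClosureN(n)
                ConstMemberClosureN(n)
        finally:
            sys.stdout = original_stdout

# write_generated_header('callbacks-generated.h')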
| 31.157324
| 111
| 0.576669
| 2,615
| 17,230
| 3.751434
| 0.075717
| 0.036697
| 0.055046
| 0.100917
| 0.82314
| 0.808053
| 0.789501
| 0.775331
| 0.754434
| 0.70581
| 0
| 0.001409
| 0.176204
| 17,230
| 552
| 112
| 31.213768
| 0.689728
| 0.106326
| 0
| 0.744086
| 0
| 0.055914
| 0.491015
| 0.08203
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.027957
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
34f117fce70642b974bce4037aece6130a6a75e8
| 2,667
|
py
|
Python
|
ProjectEuler/pr0008.py
|
DayGitH/Python-Challenges
|
bc32f1332a92fcc2dfa6f5ea4d95f8a8d64c3edf
|
[
"MIT"
] | 2
|
2020-12-23T18:59:22.000Z
|
2021-04-14T13:16:09.000Z
|
ProjectEuler/pr0008.py
|
DayGitH/Python-Challenges
|
bc32f1332a92fcc2dfa6f5ea4d95f8a8d64c3edf
|
[
"MIT"
] | null | null | null |
ProjectEuler/pr0008.py
|
DayGitH/Python-Challenges
|
bc32f1332a92fcc2dfa6f5ea4d95f8a8d64c3edf
|
[
"MIT"
] | null | null | null |
"""
The four adjacent digits in the 1000-digit number that have the greatest product are 9 × 9 × 8 × 9 = 5832.
73167176531330624919225119674426574742355349194934
96983520312774506326239578318016984801869478851843
85861560789112949495459501737958331952853208805511
12540698747158523863050715693290963295227443043557
66896648950445244523161731856403098711121722383113
62229893423380308135336276614282806444486645238749
30358907296290491560440772390713810515859307960866
70172427121883998797908792274921901699720888093776
65727333001053367881220235421809751254540594752243
52584907711670556013604839586446706324415722155397
53697817977846174064955149290862569321978468622482
83972241375657056057490261407972968652414535100474
82166370484403199890008895243450658541227588666881
16427171479924442928230863465674813919123162824586
17866458359124566529476545682848912883142607690042
24219022671055626321111109370544217506941658960408
07198403850962455444362981230987879927244284909188
84580156166097919133875499200524063689912560717606
05886116467109405077541002256983155200055935729725
71636269561882670428252483600823257530420752963450
Find the thirteen adjacent digits in the 1000-digit number that have the greatest product. What is the value of this product?
"""
def consecutive_prod(number, string):
    """Print the greatest product of `number` adjacent digits in `string`."""
    max_prod = 0
    for i in range(number - 1, len(string)):
        # Product of the window of `number` digits ending at index i.
        prod = 1
        for j in range(number):
            prod *= int(string[i - j])
        if prod > max_prod:
            max_prod = prod
            answer_list = string[i - number + 1:i + 1]
            print(prod)  # trace each new maximum as it is found
    print(max_prod)
    print(answer_list)
string = '7316717653133062491922511967442657474235534919493496983520312774506326239578318016984801869478851843858615607891129494954595017379583319528532088055111254069874715852386305071569329096329522744304355766896648950445244523161731856403098711121722383113622298934233803081353362766142828064444866452387493035890729629049156044077239071381051585930796086670172427121883998797908792274921901699720888093776657273330010533678812202354218097512545405947522435258490771167055601360483958644670632441572215539753697817977846174064955149290862569321978468622482839722413756570560574902614079729686524145351004748216637048440319989000889524345065854122758866688116427171479924442928230863465674813919123162824586178664583591245665294765456828489128831426076900422421902267105562632111110937054421750694165896040807198403850962455444362981230987879927244284909188845801561660979191338754992005240636899125607176060588611646710940507754100225698315520005593572972571636269561882670428252483600823257530420752963451'
consecutive_prod(13,string)
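# Alternative sketch (assumptions: Python 3, and `string` is the 1000-digit
# constant defined above). It scans every 13-digit window and keeps the
# largest product, which is the quantity the problem statement asks for.
from functools import reduce

def greatest_window_product(digits, width=13):
    best = 0
    for start in range(len(digits) - width + 1):
        window = digits[start:start + width]
        best = max(best, reduce(lambda acc, ch: acc * int(ch), window, 1))
    return best

# print(greatest_window_product(string))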
| 62.023256
| 1,011
| 0.901762
| 122
| 2,667
| 19.672131
| 0.516393
| 0.011667
| 0.013333
| 0.015833
| 0.05
| 0.05
| 0.05
| 0.05
| 0.05
| 0.05
| 0
| 0.82069
| 0.075741
| 2,667
| 43
| 1,012
| 62.023256
| 0.151724
| 0.469816
| 0
| 0
| 0
| 0
| 0.710732
| 0.710732
| 0
| 1
| 0
| 0
| 0
| 1
| 0.071429
| false
| 0
| 0
| 0
| 0.071429
| 0.214286
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
34f2926f4188e10cd4486724ad0e24bcab32946a
| 4,254
|
py
|
Python
|
CrimeTypeTrend/crimeTypeClass.py
|
pauljrodriguezcs/Chicago_Crime_Analysis
|
8f385fdfbb8b770631a458edf03f90836f33b674
|
[
"MIT"
] | 1
|
2020-02-12T16:25:23.000Z
|
2020-02-12T16:25:23.000Z
|
CrimeTypeTrend/crimeTypeClass.py
|
pauljrodriguezcs/Chicago_Crime_Analysis
|
8f385fdfbb8b770631a458edf03f90836f33b674
|
[
"MIT"
] | null | null | null |
CrimeTypeTrend/crimeTypeClass.py
|
pauljrodriguezcs/Chicago_Crime_Analysis
|
8f385fdfbb8b770631a458edf03f90836f33b674
|
[
"MIT"
] | null | null | null |
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import scipy.linalg as la
print("loading CrimeMapPerMonth... ")
CrimeMap = np.loadtxt('LatLongZip.txt',delimiter=',',dtype=float) # load data
print("done ... ")
### Fall
print("loading ... ")
CrimePerZipcode = np.loadtxt('Fall2013to2018.txt',delimiter=',',dtype=float)
print("done ... ")
# 1: theft
# 2: battery
# 3: criminal damage
# 4: narcotics
# 5: assault
# 6: other
x_coord = []
y_coord = []
crimes = [1.0,2.0,3.0,4.0,5.0,6.0]
print("for loop ... ")
colors = ['r','b', 'g', 'c', 'y','k']
plt.figure()
for c in range(len(crimes)):
print(crimes[c])
# plt.subplot(2,3,(c+1))
x = []
y = []
for i in range(CrimePerZipcode.shape[0]):
crime = CrimePerZipcode[i][2]
if(crime == crimes[c]):
for j in range(CrimeMap.shape[0]):
zipcode = CrimePerZipcode[i][1]
zc = CrimeMap[j][2]
if(zipcode == zc):
x.append(CrimeMap[j][0])
y.append(CrimeMap[j][1])
x_coord.append(x)
y_coord.append(y)
print("done ... ")
print("plotting ... ")
for ind in range(len(x_coord)):
plt.plot(y_coord[ind],x_coord[ind],color=colors[ind],marker=',',linestyle='None')
plt.title("Fall 2013-2018")
plt.axis([-87.96,-87.49,41.6,42.1])
plt.show()
### Spring
print("loading ... ")
CrimePerZipcode = np.loadtxt('Spring2013to2018.txt',delimiter=',',dtype=float)
print("done ... ")
# 1: theft
# 2: battery
# 3: criminal damage
# 4: narcotics
# 5: assault
# 6: other
x_coord = []
y_coord = []
crimes = [1.0,2.0,3.0,4.0,5.0,6.0]
print("for loop ... ")
colors = ['r','b', 'g', 'c', 'y','k']
plt.figure()
for c in range(len(crimes)):
print(crimes[c])
# plt.subplot(2,3,(c+1))
x = []
y = []
for i in range(CrimePerZipcode.shape[0]):
crime = CrimePerZipcode[i][2]
if(crime == crimes[c]):
for j in range(CrimeMap.shape[0]):
zipcode = CrimePerZipcode[i][1]
zc = CrimeMap[j][2]
if(zipcode == zc):
x.append(CrimeMap[j][0])
y.append(CrimeMap[j][1])
x_coord.append(x)
y_coord.append(y)
print("done ... ")
print("plotting ... ")
for ind in range(len(x_coord)):
plt.plot(y_coord[ind],x_coord[ind],color=colors[ind],marker=',',linestyle='None')
plt.title("Spring 2013-2018")
plt.axis([-87.96,-87.49,41.6,42.1])
plt.show()
### Summer
print("loading ... ")
CrimePerZipcode = np.loadtxt('Summer2013to2018.txt',delimiter=',',dtype=float)
print("done ... ")
# 1: theft
# 2: battery
# 3: criminal damage
# 4: narcotics
# 5: assault
# 6: other
x_coord = []
y_coord = []
crimes = [1.0,2.0,3.0,4.0,5.0,6.0]
print("for loop ... ")
colors = ['r','b', 'g', 'c', 'y','k']
plt.figure()
for c in range(len(crimes)):
print(crimes[c])
# plt.subplot(2,3,(c+1))
x = []
y = []
for i in range(CrimePerZipcode.shape[0]):
crime = CrimePerZipcode[i][2]
if(crime == crimes[c]):
for j in range(CrimeMap.shape[0]):
zipcode = CrimePerZipcode[i][1]
zc = CrimeMap[j][2]
if(zipcode == zc):
x.append(CrimeMap[j][0])
y.append(CrimeMap[j][1])
x_coord.append(x)
y_coord.append(y)
print("done ... ")
print("plotting ... ")
for ind in range(len(x_coord)):
plt.plot(y_coord[ind],x_coord[ind],color=colors[ind],marker=',',linestyle='None')
plt.title("Summer 2013-2018")
plt.axis([-87.96,-87.49,41.6,42.1])
plt.show()
### Winter
print("loading ... ")
CrimePerZipcode = np.loadtxt('Winter2013to2018.txt',delimiter=',',dtype=float)
print("done ... ")
# 1: theft
# 2: battery
# 3: criminal damage
# 4: narcotics
# 5: assault
# 6: other
x_coord = []
y_coord = []
crimes = [1.0,2.0,3.0,4.0,5.0,6.0]
print("for loop ... ")
colors = ['r','b', 'g', 'c', 'y','k']
plt.figure()
for c in range(len(crimes)):
print(crimes[c])
# plt.subplot(2,3,(c+1))
x = []
y = []
for i in range(CrimePerZipcode.shape[0]):
crime = CrimePerZipcode[i][2]
if(crime == crimes[c]):
for j in range(CrimeMap.shape[0]):
zipcode = CrimePerZipcode[i][1]
zc = CrimeMap[j][2]
if(zipcode == zc):
x.append(CrimeMap[j][0])
y.append(CrimeMap[j][1])
x_coord.append(x)
y_coord.append(y)
print("done ... ")
print("plotting ... ")
for ind in range(len(x_coord)):
plt.plot(y_coord[ind],x_coord[ind],color=colors[ind],marker=',',linestyle='None')
plt.title("Winter 2013-2018")
plt.axis([-87.96,-87.49,41.6,42.1])
plt.show()
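### Refactoring sketch (assumptions: the same input files as above, and that
### LatLongZip.txt has exactly three columns, latitude, longitude, zipcode,
### while the seasonal files carry the zipcode in column 1 and a crime-type
### code 1-6 in column 2). The four seasonal blocks above differ only in the
### data file and the plot title, so they collapse into one helper.
def plot_season(crime_map, filename, title,
                colors=('r', 'b', 'g', 'c', 'y', 'k'),
                crimes=(1.0, 2.0, 3.0, 4.0, 5.0, 6.0)):
    per_zip = np.loadtxt(filename, delimiter=',', dtype=float)
    plt.figure()
    for crime_code, color in zip(crimes, colors):
        xs, ys = [], []
        for row in per_zip:
            if row[2] != crime_code:
                continue
            zipcode = row[1]
            for lat, lon, zc in crime_map:
                if zc == zipcode:
                    xs.append(lat)
                    ys.append(lon)
        plt.plot(ys, xs, color=color, marker=',', linestyle='None')
    plt.title(title)
    plt.axis([-87.96, -87.49, 41.6, 42.1])
    plt.show()

# plot_season(CrimeMap, 'Fall2013to2018.txt', 'Fall 2013-2018')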
| 21.059406
| 82
| 0.614716
| 691
| 4,254
| 3.743849
| 0.127352
| 0.037109
| 0.030924
| 0.04252
| 0.892153
| 0.83649
| 0.83649
| 0.83649
| 0.83649
| 0.83649
| 0
| 0.064409
| 0.153268
| 4,254
| 201
| 83
| 21.164179
| 0.653803
| 0.097555
| 0
| 0.89313
| 0
| 0
| 0.122234
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.030534
| 0
| 0.030534
| 0.198473
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9b2c588cd5c877fc050c3f37292f5f8479276525
| 17,312
|
py
|
Python
|
pysnmp/FNET-OPTIVIEW-WAN-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 11
|
2021-02-02T16:27:16.000Z
|
2021-08-31T06:22:49.000Z
|
pysnmp/FNET-OPTIVIEW-WAN-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 75
|
2021-02-24T17:30:31.000Z
|
2021-12-08T00:01:18.000Z
|
pysnmp/FNET-OPTIVIEW-WAN-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 10
|
2019-04-30T05:51:36.000Z
|
2022-02-16T03:33:41.000Z
|
#
# PySNMP MIB module FNET-OPTIVIEW-WAN-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/FNET-OPTIVIEW-WAN-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 19:00:31 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, SingleValueConstraint, ValueSizeConstraint, ConstraintsUnion, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsUnion", "ValueRangeConstraint")
fnetOptiViewGeneric, = mibBuilder.importSymbols("FNET-GLOBAL-REG", "fnetOptiViewGeneric")
ovTrapDescription, ovTrapOffenderSubId, ovTrapSeverity, ovTrapAgentSysName, ovTrapOffenderName, ovTrapStatus, ovTrapOffenderNetAddr = mibBuilder.importSymbols("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapDescription", "ovTrapOffenderSubId", "ovTrapSeverity", "ovTrapAgentSysName", "ovTrapOffenderName", "ovTrapStatus", "ovTrapOffenderNetAddr")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
Integer32, MibIdentifier, Counter64, Bits, Counter32, Unsigned32, iso, MibScalar, MibTable, MibTableRow, MibTableColumn, ModuleIdentity, ObjectIdentity, TimeTicks, Gauge32, NotificationType, NotificationType, IpAddress = mibBuilder.importSymbols("SNMPv2-SMI", "Integer32", "MibIdentifier", "Counter64", "Bits", "Counter32", "Unsigned32", "iso", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ModuleIdentity", "ObjectIdentity", "TimeTicks", "Gauge32", "NotificationType", "NotificationType", "IpAddress")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
probSonetLinkDown = NotificationType((1, 3, 6, 1, 4, 1, 1226, 2, 1) + (0,12000)).setObjects(("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapAgentSysName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapSeverity"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapStatus"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapDescription"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderNetAddr"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderSubId"))
probSonetErrors = NotificationType((1, 3, 6, 1, 4, 1, 1226, 2, 1) + (0,12001)).setObjects(("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapAgentSysName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapSeverity"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapStatus"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapDescription"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderNetAddr"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderSubId"))
probSonetAlarms = NotificationType((1, 3, 6, 1, 4, 1, 1226, 2, 1) + (0,12002)).setObjects(("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapAgentSysName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapSeverity"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapStatus"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapDescription"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderNetAddr"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderSubId"))
probAtmLinkDown = NotificationType((1, 3, 6, 1, 4, 1, 1226, 2, 1) + (0,12003)).setObjects(("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapAgentSysName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapSeverity"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapStatus"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapDescription"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderNetAddr"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderSubId"))
probAtmErrors = NotificationType((1, 3, 6, 1, 4, 1, 1226, 2, 1) + (0,12004)).setObjects(("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapAgentSysName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapSeverity"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapStatus"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapDescription"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderNetAddr"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderSubId"))
probPDUCRCErrors = NotificationType((1, 3, 6, 1, 4, 1, 1226, 2, 1) + (0,12005)).setObjects(("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapAgentSysName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapSeverity"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapStatus"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapDescription"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderNetAddr"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderSubId"))
probPosLinkDown = NotificationType((1, 3, 6, 1, 4, 1, 1226, 2, 1) + (0,12006)).setObjects(("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapAgentSysName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapSeverity"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapStatus"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapDescription"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderNetAddr"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderSubId"))
probPosErrorsDetected = NotificationType((1, 3, 6, 1, 4, 1, 1226, 2, 1) + (0,12007)).setObjects(("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapAgentSysName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapSeverity"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapStatus"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapDescription"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderNetAddr"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderSubId"))
probLinkUtilization = NotificationType((1, 3, 6, 1, 4, 1, 1226, 2, 1) + (0,12008)).setObjects(("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapAgentSysName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapSeverity"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapStatus"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapDescription"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderNetAddr"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderSubId"))
probDNSServerNoResp = NotificationType((1, 3, 6, 1, 4, 1, 1226, 2, 1) + (0,12009)).setObjects(("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapAgentSysName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapSeverity"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapStatus"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapDescription"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderNetAddr"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderSubId"))
probDNSServerNowUsing = NotificationType((1, 3, 6, 1, 4, 1, 1226, 2, 1) + (0,12010)).setObjects(("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapAgentSysName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapSeverity"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapStatus"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapDescription"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderNetAddr"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderSubId"))
probLostDHCPLease = NotificationType((1, 3, 6, 1, 4, 1, 1226, 2, 1) + (0,12011)).setObjects(("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapAgentSysName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapSeverity"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapStatus"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapDescription"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderNetAddr"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderSubId"))
probDefaultRouterNoResp = NotificationType((1, 3, 6, 1, 4, 1, 1226, 2, 1) + (0,12012)).setObjects(("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapAgentSysName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapSeverity"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapStatus"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapDescription"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderNetAddr"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderSubId"))
probDiscoveryFull = NotificationType((1, 3, 6, 1, 4, 1, 1226, 2, 1) + (0,12013)).setObjects(("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapAgentSysName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapSeverity"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapStatus"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapDescription"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderNetAddr"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderSubId"))
probClearCounts = NotificationType((1, 3, 6, 1, 4, 1, 1226, 2, 1) + (0,12014)).setObjects(("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapAgentSysName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapSeverity"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapStatus"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapDescription"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderNetAddr"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderSubId"))
probDS1LinkDown = NotificationType((1, 3, 6, 1, 4, 1, 1226, 2, 1) + (0,12015)).setObjects(("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapAgentSysName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapSeverity"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapStatus"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapDescription"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderNetAddr"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderSubId"))
probDS1Errors = NotificationType((1, 3, 6, 1, 4, 1, 1226, 2, 1) + (0,12016)).setObjects(("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapAgentSysName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapSeverity"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapStatus"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapDescription"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderNetAddr"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderSubId"))
probDS1Alarms = NotificationType((1, 3, 6, 1, 4, 1, 1226, 2, 1) + (0,12017)).setObjects(("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapAgentSysName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapSeverity"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapStatus"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapDescription"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderNetAddr"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderSubId"))
probDS3LinkDown = NotificationType((1, 3, 6, 1, 4, 1, 1226, 2, 1) + (0,12018)).setObjects(("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapAgentSysName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapSeverity"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapStatus"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapDescription"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderNetAddr"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderSubId"))
probDS3Errors = NotificationType((1, 3, 6, 1, 4, 1, 1226, 2, 1) + (0,12019)).setObjects(("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapAgentSysName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapSeverity"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapStatus"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapDescription"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderNetAddr"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderSubId"))
probDS3Alarms = NotificationType((1, 3, 6, 1, 4, 1, 1226, 2, 1) + (0,12020)).setObjects(("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapAgentSysName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapSeverity"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapStatus"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapDescription"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderNetAddr"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderSubId"))
probFrLmiLinkDown = NotificationType((1, 3, 6, 1, 4, 1, 1226, 2, 1) + (0,12021)).setObjects(("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapAgentSysName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapSeverity"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapStatus"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapDescription"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderNetAddr"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderSubId"))
probFrLmiErrors = NotificationType((1, 3, 6, 1, 4, 1, 1226, 2, 1) + (0,12022)).setObjects(("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapAgentSysName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapSeverity"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapStatus"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapDescription"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderNetAddr"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderSubId"))
probFrErrorsDetected = NotificationType((1, 3, 6, 1, 4, 1, 1226, 2, 1) + (0,12023)).setObjects(("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapAgentSysName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapSeverity"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapStatus"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapDescription"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderNetAddr"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderSubId"))
probVcDown = NotificationType((1, 3, 6, 1, 4, 1, 1226, 2, 1) + (0,12024)).setObjects(("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapAgentSysName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapSeverity"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapStatus"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapDescription"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderNetAddr"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderSubId"))
probInvalidDlci = NotificationType((1, 3, 6, 1, 4, 1, 1226, 2, 1) + (0,12025)).setObjects(("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapAgentSysName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapSeverity"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapStatus"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapDescription"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderNetAddr"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderSubId"))
probFrDEUnderCIRUtilization = NotificationType((1, 3, 6, 1, 4, 1, 1226, 2, 1) + (0,12026)).setObjects(("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapAgentSysName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapSeverity"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapStatus"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapDescription"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderNetAddr"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderSubId"))
probHdlcLinkDown = NotificationType((1, 3, 6, 1, 4, 1, 1226, 2, 1) + (0,12027)).setObjects(("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapAgentSysName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapSeverity"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapStatus"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapDescription"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderNetAddr"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderSubId"))
probHdlcErrorsDetected = NotificationType((1, 3, 6, 1, 4, 1, 1226, 2, 1) + (0,12028)).setObjects(("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapAgentSysName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapSeverity"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapStatus"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapDescription"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderNetAddr"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderSubId"))
probFrDteDceReversed = NotificationType((1, 3, 6, 1, 4, 1, 1226, 2, 1) + (0,12029)).setObjects(("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapAgentSysName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapSeverity"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapStatus"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapDescription"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderNetAddr"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderSubId"))
probKeyDevPingLatency = NotificationType((1, 3, 6, 1, 4, 1, 1226, 2, 1) + (0,12030)).setObjects(("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapAgentSysName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapSeverity"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapStatus"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapDescription"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderName"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderNetAddr"), ("FNET-OPTIVIEW-GENERIC-MIB", "ovTrapOffenderSubId"))
mibBuilder.exportSymbols("FNET-OPTIVIEW-WAN-MIB", probFrDteDceReversed=probFrDteDceReversed, probKeyDevPingLatency=probKeyDevPingLatency, probAtmLinkDown=probAtmLinkDown, probDefaultRouterNoResp=probDefaultRouterNoResp, probDS3LinkDown=probDS3LinkDown, probAtmErrors=probAtmErrors, probVcDown=probVcDown, probDNSServerNoResp=probDNSServerNoResp, probDS3Alarms=probDS3Alarms, probPDUCRCErrors=probPDUCRCErrors, probFrLmiErrors=probFrLmiErrors, probClearCounts=probClearCounts, probFrDEUnderCIRUtilization=probFrDEUnderCIRUtilization, probInvalidDlci=probInvalidDlci, probDS1Alarms=probDS1Alarms, probFrErrorsDetected=probFrErrorsDetected, probPosLinkDown=probPosLinkDown, probLinkUtilization=probLinkUtilization, probLostDHCPLease=probLostDHCPLease, probDS1Errors=probDS1Errors, probDiscoveryFull=probDiscoveryFull, probSonetLinkDown=probSonetLinkDown, probPosErrorsDetected=probPosErrorsDetected, probHdlcLinkDown=probHdlcLinkDown, probSonetErrors=probSonetErrors, probDNSServerNowUsing=probDNSServerNowUsing, probSonetAlarms=probSonetAlarms, probHdlcErrorsDetected=probHdlcErrorsDetected, probDS3Errors=probDS3Errors, probFrLmiLinkDown=probFrLmiLinkDown, probDS1LinkDown=probDS1LinkDown)
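# Loading sketch (assumption: this generated file sits in ./pysnmp-mibs). Note
# that the module references `mibBuilder` without importing it: pysmi-generated
# MIB modules are not imported directly but executed by pysnmp's MibBuilder,
# which supplies `mibBuilder` in the execution namespace. A consumer script
# would therefore look roughly like this (kept as comments here so the module
# does not try to load itself when executed by the builder):
#
# from pysnmp.smi import builder
#
# mib_builder = builder.MibBuilder()
# mib_builder.addMibSources(builder.DirMibSource('./pysnmp-mibs'))
# mib_builder.loadModules('FNET-OPTIVIEW-WAN-MIB')
# probSonetLinkDown, = mib_builder.importSymbols('FNET-OPTIVIEW-WAN-MIB',
#                                                'probSonetLinkDown')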
| 360.666667
| 1,188
| 0.75283
| 1,766
| 17,312
| 7.379955
| 0.094564
| 0.203483
| 0.317809
| 0.367989
| 0.805494
| 0.788614
| 0.788614
| 0.770352
| 0.770352
| 0.770352
| 0
| 0.039483
| 0.056377
| 17,312
| 47
| 1,189
| 368.340426
| 0.758325
| 0.019409
| 0
| 0
| 0
| 0
| 0.575765
| 0.364649
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
9bb2a3c185ff22715f030548bd0e3e563ac7b0de
| 18,069
|
py
|
Python
|
sleuthpr/migrations/0001_initial.py
|
sleuth-io/sleuth-pr
|
aecbd8ff993cf77635bbd50dc2a4d7994642f674
|
[
"Apache-2.0"
] | 6
|
2020-09-18T07:08:30.000Z
|
2020-09-29T18:08:49.000Z
|
sleuthpr/migrations/0001_initial.py
|
sleuth-io/sleuth-pr
|
aecbd8ff993cf77635bbd50dc2a4d7994642f674
|
[
"Apache-2.0"
] | null | null | null |
sleuthpr/migrations/0001_initial.py
|
sleuth-io/sleuth-pr
|
aecbd8ff993cf77635bbd50dc2a4d7994642f674
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 3.1.1 on 2020-09-29 23:25
import django.db.models.deletion
import django.utils.timezone
from django.db import migrations
from django.db import models
class Migration(migrations.Migration):
initial = True
dependencies = []
operations = [
migrations.CreateModel(
name="Condition",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"description",
models.TextField(blank=True, max_length=16384, verbose_name="description"),
),
(
"expression",
models.TextField(blank=True, max_length=16384, verbose_name="expression"),
),
(
"on",
models.DateTimeField(
db_index=True,
default=django.utils.timezone.now,
verbose_name="created on",
),
),
("order", models.IntegerField()),
],
),
migrations.CreateModel(
name="ExternalUser",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"name",
models.CharField(db_index=True, max_length=512, null=True, verbose_name="name"),
),
(
"email",
models.EmailField(db_index=True, max_length=512, null=True, verbose_name="email"),
),
(
"username",
models.CharField(
db_index=True,
max_length=512,
null=True,
verbose_name="username",
),
),
(
"remote_id",
models.CharField(db_index=True, max_length=512, null=True, verbose_name="uid"),
),
(
"installation",
models.CharField(db_index=True, max_length=255, verbose_name="provider"),
),
],
),
migrations.CreateModel(
name="Installation",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("remote_id", models.CharField(db_index=True, max_length=512)),
("target_type", models.CharField(max_length=50)),
("target_id", models.CharField(max_length=255)),
(
"on",
models.DateTimeField(
db_index=True,
default=django.utils.timezone.now,
verbose_name="created on",
),
),
(
"provider",
models.CharField(
choices=[
("github", "GitHub"),
("gitlab", "GitLab"),
("bitbucket", "Bitbucket"),
],
max_length=50,
),
),
("active", models.BooleanField(default=True)),
],
),
migrations.CreateModel(
name="PullRequest",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"title",
models.TextField(
db_index=True,
default="",
max_length=16384,
verbose_name="title",
),
),
(
"description",
models.TextField(blank=True, max_length=16384, verbose_name="description"),
),
(
"on",
models.DateTimeField(
db_index=True,
default=django.utils.timezone.now,
verbose_name="created on",
),
),
("remote_id", models.CharField(db_index=True, max_length=512)),
(
"source_branch_name",
models.CharField(
blank=True,
db_index=True,
max_length=1024,
null=True,
verbose_name="source branch name",
),
),
(
"source_sha",
models.CharField(
blank=True,
db_index=True,
max_length=1024,
null=True,
verbose_name="source sha",
),
),
(
"base_branch_name",
models.CharField(
blank=True,
db_index=True,
max_length=1024,
null=True,
verbose_name="base branch name",
),
),
(
"url",
models.URLField(blank=True, max_length=1024, null=True, verbose_name="url"),
),
("draft", models.BooleanField(default=False)),
("merged", models.BooleanField(default=False)),
(
"mergeable",
models.CharField(
choices=[
("true", "True"),
("false", "False"),
("unknown", "Unknown"),
],
default="unknown",
max_length=15,
),
),
(
"rebaseable",
models.CharField(
choices=[
("true", "True"),
("false", "False"),
("unknown", "Unknown"),
],
default="unknown",
max_length=15,
),
),
(
"status",
models.CharField(blank=True, max_length=128, null=True, verbose_name="status"),
),
(
"author",
models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="authored_pull_requests",
to="sleuthpr.externaluser",
verbose_name="author",
),
),
],
),
migrations.CreateModel(
name="Repository",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("full_name", models.CharField(max_length=255)),
(
"remote_id",
models.CharField(db_index=True, max_length=512, null=True),
),
(
"installation",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="repositories",
to="sleuthpr.installation",
verbose_name="installation",
),
),
],
),
migrations.CreateModel(
name="Rule",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"title",
models.CharField(db_index=True, default="", max_length=255, verbose_name="title"),
),
(
"description",
models.TextField(blank=True, max_length=16384, verbose_name="description"),
),
(
"on",
models.DateTimeField(
db_index=True,
default=django.utils.timezone.now,
verbose_name="created on",
),
),
("order", models.IntegerField()),
(
"repository",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="rules",
to="sleuthpr.repository",
verbose_name="repository",
),
),
],
),
migrations.CreateModel(
name="Trigger",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("type", models.CharField(max_length=255)),
(
"rule",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="triggers",
to="sleuthpr.rule",
verbose_name="installation",
),
),
],
),
migrations.CreateModel(
name="PullRequestReviewer",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"pull_request",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="reviewers",
to="sleuthpr.pullrequest",
verbose_name="pull_request",
),
),
(
"user",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="reviewer_for_pull_requests",
to="sleuthpr.externaluser",
verbose_name="user",
),
),
],
),
migrations.CreateModel(
name="PullRequestLabel",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"value",
models.CharField(db_index=True, max_length=255, verbose_name="value"),
),
(
"pull_request",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="labels",
to="sleuthpr.pullrequest",
verbose_name="pull_request",
),
),
],
),
migrations.CreateModel(
name="PullRequestAssignee",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"pull_request",
models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="assignees",
to="sleuthpr.pullrequest",
verbose_name="pull_request",
),
),
(
"user",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="assigned_pull_requests",
to="sleuthpr.externaluser",
verbose_name="user",
),
),
],
),
migrations.AddField(
model_name="pullrequest",
name="repository",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="pull_requests",
to="sleuthpr.repository",
verbose_name="repository",
),
),
migrations.CreateModel(
name="ConditionCheckRun",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"status",
models.CharField(
choices=[("success", "Success"), ("failure", "Failure")],
db_index=True,
max_length=50,
),
),
("remote_id", models.CharField(db_index=True, max_length=512)),
(
"condition",
models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="checks",
to="sleuthpr.condition",
verbose_name="condition",
),
),
(
"pull_request",
models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="checks",
to="sleuthpr.pullrequest",
verbose_name="pull_request",
),
),
],
),
migrations.AddField(
model_name="condition",
name="rule",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="conditions",
to="sleuthpr.rule",
verbose_name="rule",
),
),
migrations.CreateModel(
name="Action",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"type",
models.CharField(db_index=True, max_length=255, verbose_name="type"),
),
(
"description",
models.TextField(blank=True, max_length=16384, verbose_name="description"),
),
(
"on",
models.DateTimeField(
db_index=True,
default=django.utils.timezone.now,
verbose_name="created on",
),
),
("parameters", models.JSONField()),
("order", models.IntegerField()),
(
"rule",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="actions",
to="sleuthpr.rule",
verbose_name="rule",
),
),
],
),
]
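# Hedged reconstruction (assumption: inferred from the CreateModel("Rule", ...)
# operation above; the real sleuthpr/models.py may differ). Shown commented out
# because declaring a second Rule model inside the migrations package would
# clash with the app's registered models.
#
# from django.db import models
# from django.utils import timezone
#
# class Rule(models.Model):
#     title = models.CharField(db_index=True, default="", max_length=255)
#     description = models.TextField(blank=True, max_length=16384)
#     on = models.DateTimeField(db_index=True, default=timezone.now)
#     order = models.IntegerField()
#     repository = models.ForeignKey(
#         "Repository", on_delete=models.CASCADE, related_name="rules")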
| 34.681382
| 102
| 0.345177
| 1,058
| 18,069
| 5.718336
| 0.123819
| 0.090909
| 0.047273
| 0.054545
| 0.766612
| 0.742645
| 0.708264
| 0.700826
| 0.679008
| 0.629587
| 0
| 0.015148
| 0.565222
| 18,069
| 520
| 103
| 34.748077
| 0.754964
| 0.00249
| 0
| 0.72179
| 1
| 0
| 0.08534
| 0.008545
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.007782
| 0
| 0.015564
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
32ff3f856de53d90ba83cf8960a9f1563aeafc58
| 110,766
|
py
|
Python
|
tests/test_mssql_specific.py
|
burakuyar/simple-ddl-parser
|
f5714e0947a7cb48c202a91969b6e6d0d9a64d24
|
[
"MIT"
] | 46
|
2021-03-07T02:21:29.000Z
|
2022-03-24T12:45:03.000Z
|
tests/test_mssql_specific.py
|
burakuyar/simple-ddl-parser
|
f5714e0947a7cb48c202a91969b6e6d0d9a64d24
|
[
"MIT"
] | 53
|
2021-03-08T08:50:00.000Z
|
2022-03-29T06:11:13.000Z
|
tests/test_mssql_specific.py
|
burakuyar/simple-ddl-parser
|
f5714e0947a7cb48c202a91969b6e6d0d9a64d24
|
[
"MIT"
] | 12
|
2021-06-21T16:14:17.000Z
|
2022-03-25T13:52:35.000Z
|
from simple_ddl_parser import DDLParser
def test_int_identity_type():
ddl = """
CREATE TABLE sqlserverlist (
id INT IDENTITY (1,1) PRIMARY KEY, -- NOTE
company_id BIGINT ,
)
"""
result = DDLParser(ddl).run(group_by_type=True)
expected = {
"comments": [" NOTE"],
"ddl_properties": [],
"sequences": [],
"domains": [],
"schemas": [],
"tables": [
{
"alter": {},
"checks": [],
"columns": [
{
"check": None,
"default": None,
"name": "id",
"nullable": False,
"references": None,
"size": (1, 1),
"type": "INT IDENTITY",
"unique": False,
},
{
"check": None,
"default": None,
"name": "company_id",
"nullable": True,
"references": None,
"size": None,
"type": "BIGINT",
"unique": False,
},
],
"index": [],
"partitioned_by": [],
"primary_key": ["id"],
"schema": None,
"table_name": "sqlserverlist",
"tablespace": None,
}
],
"types": [],
}
assert expected == result
def test_mssql_foreign_ref_in_column():
ddl = """
CREATE TABLE sqlserverlist (
id INT IDENTITY (1,1) PRIMARY KEY, -- NOTE
company_id BIGINT ,
primary_id INT FOREIGN KEY REFERENCES Persons(PersonID), -- ADD THIS COLUMN FOR THE FOREIGN KEY
)
"""
result = DDLParser(ddl).run(group_by_type=True)
expected = {
"comments": [" NOTE"],
"ddl_properties": [],
"sequences": [],
"domains": [],
"schemas": [],
"tables": [
{
"alter": {},
"checks": [],
"columns": [
{
"check": None,
"default": None,
"name": "id",
"nullable": False,
"references": None,
"size": (1, 1),
"type": "INT IDENTITY",
"unique": False,
},
{
"check": None,
"default": None,
"name": "company_id",
"nullable": True,
"references": None,
"size": None,
"type": "BIGINT",
"unique": False,
},
{
"check": None,
"default": None,
"name": "primary_id",
"nullable": True,
"references": {
"column": "PersonID",
"deferrable_initially": None,
"on_delete": None,
"on_update": None,
"schema": None,
"table": "Persons",
},
"size": None,
"type": "INT",
"unique": False,
},
],
"index": [],
"partitioned_by": [],
"primary_key": ["id"],
"schema": None,
"table_name": "sqlserverlist",
"tablespace": None,
}
],
"types": [],
}
assert expected == result
def test_max_supported_as_column_size():
ddl = """
CREATE TABLE sqlserverlist (
user_account VARCHAR(8000) NOT NULL,
user_first_name VARCHAR(max) NOT NULL,
)
"""
result = DDLParser(ddl).run(group_by_type=True)
expected = {
"sequences": [],
"ddl_properties": [],
"domains": [],
"schemas": [],
"tables": [
{
"alter": {},
"checks": [],
"columns": [
{
"check": None,
"default": None,
"name": "user_account",
"nullable": False,
"references": None,
"size": 8000,
"type": "VARCHAR",
"unique": False,
},
{
"check": None,
"default": None,
"name": "user_first_name",
"nullable": False,
"references": None,
"size": "max",
"type": "VARCHAR",
"unique": False,
},
],
"index": [],
"partitioned_by": [],
"primary_key": [],
"schema": None,
"table_name": "sqlserverlist",
"tablespace": None,
}
],
"types": [],
}
assert expected == result
def test_constraint_unique():
ddl = """
CREATE TABLE sqlserverlist (
id INT IDENTITY (1,1) PRIMARY KEY, -- NOTE
company_id BIGINT ,
user_last_name VARBINARY(8000) NOT NULL,
CONSTRAINT UC_sqlserverlist_last_name UNIQUE (company_id,user_last_name)
)
"""
result = DDLParser(ddl).run(group_by_type=True)
expected = {
"comments": [" NOTE"],
"sequences": [],
"ddl_properties": [],
"domains": [],
"schemas": [],
"tables": [
{
"alter": {},
"checks": [],
"columns": [
{
"check": None,
"default": None,
"name": "id",
"nullable": False,
"references": None,
"size": (1, 1),
"type": "INT IDENTITY",
"unique": False,
},
{
"check": None,
"default": None,
"name": "company_id",
"nullable": True,
"references": None,
"size": None,
"type": "BIGINT",
"unique": True,
},
{
"check": None,
"default": None,
"name": "user_last_name",
"nullable": False,
"references": None,
"size": 8000,
"type": "VARBINARY",
"unique": True,
},
],
"index": [],
"partitioned_by": [],
"primary_key": ["id"],
"schema": None,
"table_name": "sqlserverlist",
"tablespace": None,
"constraints": {
"uniques": [
{
"columns": ["company_id", "user_last_name"],
"constraint_name": "UC_sqlserverlist_last_name",
}
]
},
}
],
"types": [],
}
assert expected == result
def test_constraint_unique_none():
ddl = """
CREATE TABLE sqlserverlist (
id INT IDENTITY (1,1) PRIMARY KEY, -- NOTE
company_id BIGINT ,
user_last_name VARBINARY(8000) NOT NULL
)
"""
result = DDLParser(ddl).run(group_by_type=True, output_mode="mssql")
expected = {
"comments": [" NOTE"],
"sequences": [],
"ddl_properties": [],
"domains": [],
"schemas": [],
"tables": [
{
"alter": {},
"checks": [],
"columns": [
{
"check": None,
"default": None,
"name": "id",
"nullable": False,
"references": None,
"size": (1, 1),
"type": "INT IDENTITY",
"unique": False,
},
{
"check": None,
"default": None,
"name": "company_id",
"nullable": True,
"references": None,
"size": None,
"type": "BIGINT",
"unique": False,
},
{
"check": None,
"default": None,
"name": "user_last_name",
"nullable": False,
"references": None,
"size": 8000,
"type": "VARBINARY",
"unique": False,
},
],
"index": [],
"partitioned_by": [],
"primary_key": ["id"],
"schema": None,
"table_name": "sqlserverlist",
"tablespace": None,
"constraints": {"uniques": None, "checks": None, "references": None},
}
],
"types": [],
}
assert expected == result
def test_two_unique_constructs():
ddl = """
CREATE TABLE sqlserverlist (
id INT IDENTITY (1,1) PRIMARY KEY, -- NOTE
company_id BIGINT ,
primary_id INT FOREIGN KEY REFERENCES Persons(PersonID), -- ADD THIS COLUMN FOR THE FOREIGN KEY
age TINYINT NULL UNIQUE,
days_active SMALLINT NOT NULL,
user_origin_of_birth char(255),
user_account VARCHAR(8000) NOT NULL,
user_first_name VARCHAR(max) NOT NULL,
user_last_name VARBINARY(8000) NOT NULL,
user_street NCHAR(400) NULL,
user_city NVARCHAR(4000),
about_user NTEXT NULL,
user_description TEXT,
starting_funds FLOAT(53) NULL,
extra_funds REAL,
current_funds DECIMAL (38,20),
ending_funds SMALLMONEY NOT NULL,
birth_date DATE NOT NULL,
time_of_birth TIME(7),
enrollment_date SMALLDATETIME,
delete_date DATETIME NULL,
create_date DATETIME2(7) NOT NULL,
user_time_zone DATETIMEOFFSET(7),
oder_date date DEFAULT GETDATE(), -- added to demonstrate sql sever Defaults
country varchar(255) DEFAULT 'Sandnes', -- added to demonstrate sql sever Defaults
active bit NULL,
home_size GEOMETRY, -- Sql Server Defaults to Null
user_photo IMAGE, -- Sql Server Defaults to Null
--UNIQUE (id),
CONSTRAINT UC_date UNIQUE (delete_date,create_date),
CONSTRAINT UC_sqlserverlist_last_name UNIQUE (company_id,user_last_name),
CONSTRAINT CHK_Person_Age_under CHECK (days_active<=18 AND user_city='New York'),
CONSTRAINT FK_Person_Age_under FOREIGN KEY (id)REFERENCES Persons(PersonID)
)
"""
result = DDLParser(ddl).run(group_by_type=True)
expected = {
"comments": [" NOTE"],
"sequences": [],
"ddl_properties": [],
"domains": [],
"schemas": [],
"tables": [
{
"alter": {},
"checks": [
{
"constraint_name": "CHK_Person_Age_under",
"statement": "days_active<=18 AND user_city= 'New York'",
}
],
"columns": [
{
"check": None,
"default": None,
"name": "id",
"nullable": False,
"references": None,
"size": (1, 1),
"type": "INT IDENTITY",
"unique": False,
},
{
"check": None,
"default": None,
"name": "company_id",
"nullable": True,
"references": None,
"size": None,
"type": "BIGINT",
"unique": True,
},
{
"check": None,
"default": None,
"name": "primary_id",
"nullable": True,
"references": {
"column": "PersonID",
"deferrable_initially": None,
"on_delete": None,
"on_update": None,
"schema": None,
"table": "Persons",
},
"size": None,
"type": "INT",
"unique": False,
},
{
"check": None,
"default": None,
"name": "age",
"nullable": True,
"references": None,
"size": None,
"type": "TINYINT",
"unique": True,
},
{
"check": None,
"default": None,
"name": "days_active",
"nullable": False,
"references": None,
"size": None,
"type": "SMALLINT",
"unique": False,
},
{
"check": None,
"default": None,
"name": "user_origin_of_birth",
"nullable": True,
"references": None,
"size": 255,
"type": "char",
"unique": False,
},
{
"check": None,
"default": None,
"name": "user_account",
"nullable": False,
"references": None,
"size": 8000,
"type": "VARCHAR",
"unique": False,
},
{
"check": None,
"default": None,
"name": "user_first_name",
"nullable": False,
"references": None,
"size": "max",
"type": "VARCHAR",
"unique": False,
},
{
"check": None,
"default": None,
"name": "user_last_name",
"nullable": False,
"references": None,
"size": 8000,
"type": "VARBINARY",
"unique": True,
},
{
"check": None,
"default": None,
"name": "user_street",
"nullable": True,
"references": None,
"size": 400,
"type": "NCHAR",
"unique": False,
},
{
"check": None,
"default": None,
"name": "user_city",
"nullable": True,
"references": None,
"size": 4000,
"type": "NVARCHAR",
"unique": False,
},
{
"check": None,
"default": None,
"name": "about_user",
"nullable": True,
"references": None,
"size": None,
"type": "NTEXT",
"unique": False,
},
{
"check": None,
"default": None,
"name": "user_description",
"nullable": True,
"references": None,
"size": None,
"type": "TEXT",
"unique": False,
},
{
"check": None,
"default": None,
"name": "starting_funds",
"nullable": True,
"references": None,
"size": 53,
"type": "FLOAT",
"unique": False,
},
{
"check": None,
"default": None,
"name": "extra_funds",
"nullable": True,
"references": None,
"size": None,
"type": "REAL",
"unique": False,
},
{
"check": None,
"default": None,
"name": "current_funds",
"nullable": True,
"references": None,
"size": (38, 20),
"type": "DECIMAL",
"unique": False,
},
{
"check": None,
"default": None,
"name": "ending_funds",
"nullable": False,
"references": None,
"size": None,
"type": "SMALLMONEY",
"unique": False,
},
{
"check": None,
"default": None,
"name": "birth_date",
"nullable": False,
"references": None,
"size": None,
"type": "DATE",
"unique": False,
},
{
"check": None,
"default": None,
"name": "time_of_birth",
"nullable": True,
"references": None,
"size": 7,
"type": "TIME",
"unique": False,
},
{
"check": None,
"default": None,
"name": "enrollment_date",
"nullable": True,
"references": None,
"size": None,
"type": "SMALLDATETIME",
"unique": False,
},
{
"check": None,
"default": None,
"name": "delete_date",
"nullable": True,
"references": None,
"size": None,
"type": "DATETIME",
"unique": False,
},
{
"check": None,
"default": None,
"name": "create_date",
"nullable": False,
"references": None,
"size": 7,
"type": "DATETIME2",
"unique": False,
},
{
"check": None,
"default": None,
"name": "user_time_zone",
"nullable": True,
"references": None,
"size": 7,
"type": "DATETIMEOFFSET",
"unique": False,
},
{
"check": None,
"default": "GETDATE()",
"name": "oder_date",
"nullable": True,
"references": None,
"size": None,
"type": "date",
"unique": False,
},
{
"check": None,
"default": "'Sandnes'",
"name": "country",
"nullable": True,
"references": None,
"size": 255,
"type": "varchar",
"unique": False,
},
{
"check": None,
"default": None,
"name": "active",
"nullable": True,
"references": None,
"size": None,
"type": "bit",
"unique": False,
},
{
"check": None,
"default": None,
"name": "home_size",
"nullable": True,
"references": None,
"size": None,
"type": "GEOMETRY",
"unique": False,
},
{
"check": None,
"default": None,
"name": "user_photo",
"nullable": True,
"references": None,
"size": None,
"type": "IMAGE",
"unique": False,
},
],
"constraints": {
"checks": [
{
"constraint_name": "CHK_Person_Age_under",
"statement": "days_active<=18 AND " "user_city= 'New York'",
}
],
"references": [
{
"columns": ["PersonID"],
"constraint_name": "FK_Person_Age_under",
"deferrable_initially": None,
"on_delete": None,
"on_update": None,
"schema": None,
"table": "Persons",
}
],
"uniques": [
{
"columns": ["delete_date", "create_date"],
"constraint_name": "UC_date",
},
{
"columns": ["company_id", "user_last_name"],
"constraint_name": "UC_sqlserverlist_last_name",
},
],
},
"index": [],
"partitioned_by": [],
"primary_key": ["id"],
"schema": None,
"table_name": "sqlserverlist",
"tablespace": None,
}
],
"types": [],
}
assert expected == result
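

# Table-level FOREIGN KEY clauses, including a composite one, are mapped onto
# the 'references' entry of the individual columns.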
def test_foreign_keys():
ddl = """
CREATE TABLE sqlserverlist (
id INT IDENTITY (1,1) PRIMARY KEY, -- NOTE
company_id BIGINT ,
primary_id INT FOREIGN KEY REFERENCES Persons(PersonID), -- ADD THIS COLUMN FOR THE FOREIGN KEY
age TINYINT NULL UNIQUE,
days_active SMALLINT NOT NULL,
user_origin_of_birth char(255),
user_account VARCHAR(8000) NOT NULL,
user_first_name VARCHAR(max) NOT NULL,
user_last_name VARBINARY(8000) NOT NULL,
user_street NCHAR(400) NULL,
user_city NVARCHAR(4000),
about_user NTEXT NULL,
user_description TEXT,
starting_funds FLOAT(53) NULL,
extra_funds REAL,
current_funds DECIMAL (38,20),
ending_funds SMALLMONEY NOT NULL,
birth_date DATE NOT NULL,
time_of_birth TIME(7),
oder_date date DEFAULT GETDATE(), -- added to demonstrate sql sever Defaults
country varchar(255) DEFAULT 'Sandnes', -- added to demonstrate sql sever Defaults
active bit NULL,
home_size GEOMETRY, -- Sql Server Defaults to Null
user_photo IMAGE, -- Sql Server Defaults to Null
--UNIQUE (id),
CONSTRAINT UC_sqlserverlist_last_name UNIQUE (company_id,user_last_name),
CONSTRAINT CHK_Person_Age_under CHECK (days_active<=18 AND user_city='New York'),
FOREIGN KEY (id) REFERENCES Persons(PersonID),
FOREIGN KEY (user_first_name, id) REFERENCES Persons(PersonID, PersonName),
);
"""
result = DDLParser(ddl).run(group_by_type=True)
expected = {
"comments": [" NOTE"],
"sequences": [],
"domains": [],
"schemas": [],
"tables": [
{
"alter": {},
"checks": [
{
"constraint_name": "CHK_Person_Age_under",
"statement": "days_active<=18 AND user_city= 'New York'",
}
],
"columns": [
{
"check": None,
"default": None,
"name": "id",
"nullable": False,
"references": {
"column": "PersonName",
"deferrable_initially": None,
"on_delete": None,
"on_update": None,
"schema": None,
"table": "Persons",
},
"size": (1, 1),
"type": "INT IDENTITY",
"unique": False,
},
{
"check": None,
"default": None,
"name": "company_id",
"nullable": True,
"references": None,
"size": None,
"type": "BIGINT",
"unique": True,
},
{
"check": None,
"default": None,
"name": "primary_id",
"nullable": True,
"references": {
"column": "PersonID",
"deferrable_initially": None,
"on_delete": None,
"on_update": None,
"schema": None,
"table": "Persons",
},
"size": None,
"type": "INT",
"unique": False,
},
{
"check": None,
"default": None,
"name": "age",
"nullable": True,
"references": None,
"size": None,
"type": "TINYINT",
"unique": True,
},
{
"check": None,
"default": None,
"name": "days_active",
"nullable": False,
"references": None,
"size": None,
"type": "SMALLINT",
"unique": False,
},
{
"check": None,
"default": None,
"name": "user_origin_of_birth",
"nullable": True,
"references": None,
"size": 255,
"type": "char",
"unique": False,
},
{
"check": None,
"default": None,
"name": "user_account",
"nullable": False,
"references": None,
"size": 8000,
"type": "VARCHAR",
"unique": False,
},
{
"check": None,
"default": None,
"name": "user_first_name",
"nullable": False,
"references": {
"column": "PersonID",
"deferrable_initially": None,
"on_delete": None,
"on_update": None,
"schema": None,
"table": "Persons",
},
"size": "max",
"type": "VARCHAR",
"unique": False,
},
{
"check": None,
"default": None,
"name": "user_last_name",
"nullable": False,
"references": None,
"size": 8000,
"type": "VARBINARY",
"unique": True,
},
{
"check": None,
"default": None,
"name": "user_street",
"nullable": True,
"references": None,
"size": 400,
"type": "NCHAR",
"unique": False,
},
{
"check": None,
"default": None,
"name": "user_city",
"nullable": True,
"references": None,
"size": 4000,
"type": "NVARCHAR",
"unique": False,
},
{
"check": None,
"default": None,
"name": "about_user",
"nullable": True,
"references": None,
"size": None,
"type": "NTEXT",
"unique": False,
},
{
"check": None,
"default": None,
"name": "user_description",
"nullable": True,
"references": None,
"size": None,
"type": "TEXT",
"unique": False,
},
{
"check": None,
"default": None,
"name": "starting_funds",
"nullable": True,
"references": None,
"size": 53,
"type": "FLOAT",
"unique": False,
},
{
"check": None,
"default": None,
"name": "extra_funds",
"nullable": True,
"references": None,
"size": None,
"type": "REAL",
"unique": False,
},
{
"check": None,
"default": None,
"name": "current_funds",
"nullable": True,
"references": None,
"size": (38, 20),
"type": "DECIMAL",
"unique": False,
},
{
"check": None,
"default": None,
"name": "ending_funds",
"nullable": False,
"references": None,
"size": None,
"type": "SMALLMONEY",
"unique": False,
},
{
"check": None,
"default": None,
"name": "birth_date",
"nullable": False,
"references": None,
"size": None,
"type": "DATE",
"unique": False,
},
{
"check": None,
"default": None,
"name": "time_of_birth",
"nullable": True,
"references": None,
"size": 7,
"type": "TIME",
"unique": False,
},
{
"check": None,
"default": "GETDATE()",
"name": "oder_date",
"nullable": True,
"references": None,
"size": None,
"type": "date",
"unique": False,
},
{
"check": None,
"default": "'Sandnes'",
"name": "country",
"nullable": True,
"references": None,
"size": 255,
"type": "varchar",
"unique": False,
},
{
"check": None,
"default": None,
"name": "active",
"nullable": True,
"references": None,
"size": None,
"type": "bit",
"unique": False,
},
{
"check": None,
"default": None,
"name": "home_size",
"nullable": True,
"references": None,
"size": None,
"type": "GEOMETRY",
"unique": False,
},
{
"check": None,
"default": None,
"name": "user_photo",
"nullable": True,
"references": None,
"size": None,
"type": "IMAGE",
"unique": False,
},
],
"constraints": {
"checks": [
{
"constraint_name": "CHK_Person_Age_under",
"statement": "days_active<=18 AND " "user_city= 'New York'",
}
],
"uniques": [
{
"columns": ["company_id", "user_last_name"],
"constraint_name": "UC_sqlserverlist_last_name",
}
],
},
"index": [],
"partitioned_by": [],
"primary_key": ["id"],
"schema": None,
"table_name": "sqlserverlist",
"tablespace": None,
}
],
"types": [],
"ddl_properties": [],
}
assert expected == result
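

# ALTER TABLE ... ADD UNIQUE statements end up under the table's
# 'alter' -> 'uniques' and mark the affected columns as unique.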
def test_alter_unique():
ddl = """
CREATE TABLE sqlserverlist (
id INT IDENTITY (1,1) PRIMARY KEY, -- NOTE
company_id BIGINT ,
primary_id INT FOREIGN KEY REFERENCES Persons(PersonID), -- ADD THIS COLUMN FOR THE FOREIGN KEY
age TINYINT NULL UNIQUE,
days_active SMALLINT NOT NULL,
user_origin_of_birth char(255),
user_account VARCHAR(8000) NOT NULL,
user_first_name VARCHAR(max) NOT NULL,
user_last_name VARBINARY(8000) NOT NULL,
user_street NCHAR(400) NULL,
user_city NVARCHAR(4000),
about_user NTEXT NULL,
user_description TEXT,
starting_funds FLOAT(53) NULL,
extra_funds REAL,
current_funds DECIMAL (38,20),
ending_funds SMALLMONEY NOT NULL,
birth_date DATE NOT NULL,
time_of_birth TIME(7),
enrollment_date SMALLDATETIME,
delete_date DATETIME NULL,
create_date DATETIME2(7) NOT NULL,
user_time_zone DATETIMEOFFSET(7),
oder_date date DEFAULT GETDATE(), -- added to demonstrate sql sever Defaults
country varchar(255) DEFAULT 'Sandnes', -- added to demonstrate sql sever Defaults
active bit NULL,
home_size GEOMETRY, -- Sql Server Defaults to Null
user_photo IMAGE, -- Sql Server Defaults to Null
--UNIQUE (id),
CONSTRAINT UC_sqlserverlist_last_name UNIQUE (company_id,user_last_name),
CONSTRAINT CHK_Person_Age_under CHECK (days_active<=18 AND user_city='New York'),
FOREIGN KEY (id) REFERENCES Persons(PersonID),
CONSTRAINT FK_Person_Age_under FOREIGN KEY (id)REFERENCES Persons(PersonID)
);
-- UNIQUE CONSTRAINTS
ALTER TABLE sqlserverlist ADD UNIQUE (birth_date);
ALTER TABLE sqlserverlist ADD CONSTRAINT UC_Person_ening_funds UNIQUE (current_funds,create_date);
"""
result = DDLParser(ddl).run(group_by_type=True)
expected = {
"comments": [" NOTE"],
"sequences": [],
"ddl_properties": [],
"domains": [],
"schemas": [],
"tables": [
{
"alter": {
"uniques": [
{"columns": ["birth_date"], "constraint_name": None},
{
"columns": ["current_funds", "create_date"],
"constraint_name": "UC_Person_ening_funds",
},
]
},
"checks": [
{
"constraint_name": "CHK_Person_Age_under",
"statement": "days_active<=18 AND user_city= 'New York'",
}
],
"columns": [
{
"check": None,
"default": None,
"name": "id",
"nullable": False,
"references": {
"column": "PersonID",
"deferrable_initially": None,
"on_delete": None,
"on_update": None,
"schema": None,
"table": "Persons",
},
"size": (1, 1),
"type": "INT IDENTITY",
"unique": False,
},
{
"check": None,
"default": None,
"name": "company_id",
"nullable": True,
"references": None,
"size": None,
"type": "BIGINT",
"unique": True,
},
{
"check": None,
"default": None,
"name": "primary_id",
"nullable": True,
"references": {
"column": "PersonID",
"deferrable_initially": None,
"on_delete": None,
"on_update": None,
"schema": None,
"table": "Persons",
},
"size": None,
"type": "INT",
"unique": False,
},
{
"check": None,
"default": None,
"name": "age",
"nullable": True,
"references": None,
"size": None,
"type": "TINYINT",
"unique": True,
},
{
"check": None,
"default": None,
"name": "days_active",
"nullable": False,
"references": None,
"size": None,
"type": "SMALLINT",
"unique": False,
},
{
"check": None,
"default": None,
"name": "user_origin_of_birth",
"nullable": True,
"references": None,
"size": 255,
"type": "char",
"unique": False,
},
{
"check": None,
"default": None,
"name": "user_account",
"nullable": False,
"references": None,
"size": 8000,
"type": "VARCHAR",
"unique": False,
},
{
"check": None,
"default": None,
"name": "user_first_name",
"nullable": False,
"references": None,
"size": "max",
"type": "VARCHAR",
"unique": False,
},
{
"check": None,
"default": None,
"name": "user_last_name",
"nullable": False,
"references": None,
"size": 8000,
"type": "VARBINARY",
"unique": True,
},
{
"check": None,
"default": None,
"name": "user_street",
"nullable": True,
"references": None,
"size": 400,
"type": "NCHAR",
"unique": False,
},
{
"check": None,
"default": None,
"name": "user_city",
"nullable": True,
"references": None,
"size": 4000,
"type": "NVARCHAR",
"unique": False,
},
{
"check": None,
"default": None,
"name": "about_user",
"nullable": True,
"references": None,
"size": None,
"type": "NTEXT",
"unique": False,
},
{
"check": None,
"default": None,
"name": "user_description",
"nullable": True,
"references": None,
"size": None,
"type": "TEXT",
"unique": False,
},
{
"check": None,
"default": None,
"name": "starting_funds",
"nullable": True,
"references": None,
"size": 53,
"type": "FLOAT",
"unique": False,
},
{
"check": None,
"default": None,
"name": "extra_funds",
"nullable": True,
"references": None,
"size": None,
"type": "REAL",
"unique": False,
},
{
"check": None,
"default": None,
"name": "current_funds",
"nullable": True,
"references": None,
"size": (38, 20),
"type": "DECIMAL",
"unique": True,
},
{
"check": None,
"default": None,
"name": "ending_funds",
"nullable": False,
"references": None,
"size": None,
"type": "SMALLMONEY",
"unique": False,
},
{
"check": None,
"default": None,
"name": "birth_date",
"nullable": False,
"references": None,
"size": None,
"type": "DATE",
"unique": True,
},
{
"check": None,
"default": None,
"name": "time_of_birth",
"nullable": True,
"references": None,
"size": 7,
"type": "TIME",
"unique": False,
},
{
"check": None,
"default": None,
"name": "enrollment_date",
"nullable": True,
"references": None,
"size": None,
"type": "SMALLDATETIME",
"unique": False,
},
{
"check": None,
"default": None,
"name": "delete_date",
"nullable": True,
"references": None,
"size": None,
"type": "DATETIME",
"unique": False,
},
{
"check": None,
"default": None,
"name": "create_date",
"nullable": False,
"references": None,
"size": 7,
"type": "DATETIME2",
"unique": True,
},
{
"check": None,
"default": None,
"name": "user_time_zone",
"nullable": True,
"references": None,
"size": 7,
"type": "DATETIMEOFFSET",
"unique": False,
},
{
"check": None,
"default": "GETDATE()",
"name": "oder_date",
"nullable": True,
"references": None,
"size": None,
"type": "date",
"unique": False,
},
{
"check": None,
"default": "'Sandnes'",
"name": "country",
"nullable": True,
"references": None,
"size": 255,
"type": "varchar",
"unique": False,
},
{
"check": None,
"default": None,
"name": "active",
"nullable": True,
"references": None,
"size": None,
"type": "bit",
"unique": False,
},
{
"check": None,
"default": None,
"name": "home_size",
"nullable": True,
"references": None,
"size": None,
"type": "GEOMETRY",
"unique": False,
},
{
"check": None,
"default": None,
"name": "user_photo",
"nullable": True,
"references": None,
"size": None,
"type": "IMAGE",
"unique": False,
},
],
"constraints": {
"checks": [
{
"constraint_name": "CHK_Person_Age_under",
"statement": "days_active<=18 AND " "user_city= 'New York'",
}
],
"references": [
{
"columns": ["PersonID"],
"constraint_name": "FK_Person_Age_under",
"deferrable_initially": None,
"on_delete": None,
"on_update": None,
"schema": None,
"table": "Persons",
}
],
"uniques": [
{
"columns": ["company_id", "user_last_name"],
"constraint_name": "UC_sqlserverlist_last_name",
}
],
},
"index": [],
"partitioned_by": [],
"primary_key": ["id"],
"schema": None,
"table_name": "sqlserverlist",
"tablespace": None,
}
],
"types": [],
}
assert expected == result
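

# ALTER TABLE ... ADD CONSTRAINT ... DEFAULT ... FOR <column> is captured
# under the table's 'alter' -> 'defaults'.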
def test_defaults_in_alter():
ddl = """
CREATE TABLE sqlserverlist (
id INT IDENTITY (1,1) PRIMARY KEY, -- NOTE
company_id BIGINT ,
primary_id INT FOREIGN KEY REFERENCES Persons(PersonID), -- ADD THIS COLUMN FOR THE FOREIGN KEY
age TINYINT NULL UNIQUE,
days_active SMALLINT NOT NULL,
user_origin_of_birth char(255),
user_account VARCHAR(8000) NOT NULL,
user_time_zone DATETIMEOFFSET(7),
oder_date date DEFAULT GETDATE(), -- added to demonstrate sql sever Defaults
country varchar(255) DEFAULT 'Sandnes', -- added to demonstrate sql sever Defaults
active bit NULL,
home_size GEOMETRY, -- Sql Server Defaults to Null
user_photo IMAGE, -- Sql Server Defaults to Null
--UNIQUE (id),
CONSTRAINT UC_sqlserverlist_last_name UNIQUE (company_id,user_last_name),
CONSTRAINT CHK_Person_Age_under CHECK (days_active<=18 AND user_city='New York'),
FOREIGN KEY (id) REFERENCES Persons(PersonID),
CONSTRAINT FK_Person_Age_under FOREIGN KEY (id)REFERENCES Persons(PersonID)
);
ALTER TABLE sqlserverlist ADD CONSTRAINT df_user_street DEFAULT '1 WAY STREET' FOR user_street;
"""
result = DDLParser(ddl).run(group_by_type=True, output_mode="mssql")
expected = {
"comments": [" NOTE"],
"sequences": [],
"ddl_properties": [],
"domains": [],
"schemas": [],
"tables": [
{
"alter": {
"defaults": [
{
"columns": ["user_street"],
"constraint_name": "df_user_street",
"value": "'1 WAY STREET'",
}
]
},
"checks": [
{
"constraint_name": "CHK_Person_Age_under",
"statement": "days_active<=18 AND user_city= 'New York'",
}
],
"columns": [
{
"check": None,
"default": None,
"name": "id",
"nullable": False,
"references": {
"column": "PersonID",
"deferrable_initially": None,
"on_delete": None,
"on_update": None,
"schema": None,
"table": "Persons",
},
"size": (1, 1),
"type": "INT IDENTITY",
"unique": False,
},
{
"check": None,
"default": None,
"name": "company_id",
"nullable": True,
"references": None,
"size": None,
"type": "BIGINT",
"unique": True,
},
{
"check": None,
"default": None,
"name": "primary_id",
"nullable": True,
"references": {
"column": "PersonID",
"deferrable_initially": None,
"on_delete": None,
"on_update": None,
"schema": None,
"table": "Persons",
},
"size": None,
"type": "INT",
"unique": False,
},
{
"check": None,
"default": None,
"name": "age",
"nullable": True,
"references": None,
"size": None,
"type": "TINYINT",
"unique": True,
},
{
"check": None,
"default": None,
"name": "days_active",
"nullable": False,
"references": None,
"size": None,
"type": "SMALLINT",
"unique": False,
},
{
"check": None,
"default": None,
"name": "user_origin_of_birth",
"nullable": True,
"references": None,
"size": 255,
"type": "char",
"unique": False,
},
{
"check": None,
"default": None,
"name": "user_account",
"nullable": False,
"references": None,
"size": 8000,
"type": "VARCHAR",
"unique": False,
},
{
"check": None,
"default": None,
"name": "user_time_zone",
"nullable": True,
"references": None,
"size": 7,
"type": "DATETIMEOFFSET",
"unique": False,
},
{
"check": None,
"default": "GETDATE()",
"name": "oder_date",
"nullable": True,
"references": None,
"size": None,
"type": "date",
"unique": False,
},
{
"check": None,
"default": "'Sandnes'",
"name": "country",
"nullable": True,
"references": None,
"size": 255,
"type": "varchar",
"unique": False,
},
{
"check": None,
"default": None,
"name": "active",
"nullable": True,
"references": None,
"size": None,
"type": "bit",
"unique": False,
},
{
"check": None,
"default": None,
"name": "home_size",
"nullable": True,
"references": None,
"size": None,
"type": "GEOMETRY",
"unique": False,
},
{
"check": None,
"default": None,
"name": "user_photo",
"nullable": True,
"references": None,
"size": None,
"type": "IMAGE",
"unique": False,
},
],
"constraints": {
"checks": [
{
"constraint_name": "CHK_Person_Age_under",
"statement": "days_active<=18 AND " "user_city= 'New York'",
}
],
"references": [
{
"columns": ["PersonID"],
"constraint_name": "FK_Person_Age_under",
"deferrable_initially": None,
"on_delete": None,
"on_update": None,
"schema": None,
"table": "Persons",
}
],
"uniques": [
{
"columns": ["company_id", "user_last_name"],
"constraint_name": "UC_sqlserverlist_last_name",
}
],
},
"index": [],
"partitioned_by": [],
"primary_key": ["id"],
"schema": None,
"table_name": "sqlserverlist",
"tablespace": None,
}
],
"types": [],
}
assert expected == result
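

# A named CONSTRAINT ... PRIMARY KEY (col, col) populates both
# constraints['primary_keys'] and the table's 'primary_key' list.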
def test_mysql_constraint_pk():
ddl = """
CREATE TABLE Persons (
ID int NOT NULL,
LastName varchar(255) NOT NULL,
FirstName varchar(255),
Age int,
CONSTRAINT PK_Person PRIMARY KEY (ID,LastName)
);
"""
result = DDLParser(ddl).run(group_by_type=True)
expected = {
"sequences": [],
"ddl_properties": [],
"domains": [],
"schemas": [],
"tables": [
{
"alter": {},
"checks": [],
"columns": [
{
"check": None,
"default": None,
"name": "ID",
"nullable": False,
"references": None,
"size": None,
"type": "int",
"unique": False,
},
{
"check": None,
"default": None,
"name": "LastName",
"nullable": False,
"references": None,
"size": 255,
"type": "varchar",
"unique": False,
},
{
"check": None,
"default": None,
"name": "FirstName",
"nullable": True,
"references": None,
"size": 255,
"type": "varchar",
"unique": False,
},
{
"check": None,
"default": None,
"name": "Age",
"nullable": True,
"references": None,
"size": None,
"type": "int",
"unique": False,
},
],
"constraints": {
"primary_keys": [
{"columns": ["ID", "LastName"], "constraint_name": "PK_Person"}
]
},
"index": [],
"partitioned_by": [],
"primary_key": ["ID", "LastName"],
"schema": None,
"table_name": "Persons",
"tablespace": None,
}
],
"types": [],
}
assert expected == result
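

# PRIMARY KEY CLUSTERED with bracketed identifiers: both named PK constraints
# are collected and the clustered key column order is recorded.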
def test_constraint_primary_key():
expected = {
"ddl_properties": [],
"domains": [],
"schemas": [],
"sequences": [],
"tables": [
{
"alter": {},
"checks": [],
"clustered_primary_key": [{"column": "[id]", "order": "ASC"}],
"columns": [
{
"check": None,
"default": None,
"name": "[id]",
"nullable": False,
"references": None,
"size": (1, 1),
"type": "[int] IDENTITY",
"unique": False,
},
{
"check": None,
"default": None,
"name": "[user_id]",
"nullable": True,
"references": None,
"size": None,
"type": "[int]",
"unique": False,
},
],
"constraints": {
"primary_keys": [
{
"columns": ["[id]"],
"constraint_name": "[PK_users_WorkSchedule_id]",
},
{
"columns": ["[id]"],
"constraint_name": "[PK_users_WorkSchedule_id]",
},
]
},
"index": [],
"partitioned_by": [],
"primary_key": ["[id]"],
"schema": "[dbo]",
"table_name": "[users_WorkSchedule]",
"tablespace": None,
}
],
"types": [],
}
ddl = """CREATE TABLE [dbo].[users_WorkSchedule](
[id] [int] IDENTITY(1,1) NOT NULL,
[user_id] [int] NULL),
CONSTRAINT [PK_users_WorkSchedule_id] PRIMARY KEY CLUSTERED
(
[id] ASC
),
CONSTRAINT [PK_users_WorkSchedule_id] PRIMARY KEY
(
[id] ASC
)
"""
result = DDLParser(ddl).run(group_by_type=True)
assert result == expected
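

# A WITH (...) option list attached to the clustered primary key constraint is
# parsed into the table's 'with' properties; GENERATED ALWAYS AS ROW START/END
# columns are reported via the 'generated' field.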
def test_constraint_with_with():
ddl = """
USE [mystaffonline]
GO
/****** Object: Table [dbo].[users_WorkSchedule] Script Date: 9/29/2021 9:55:26 PM ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
CREATE TABLE [dbo].[users_WorkSchedule](
[id] [int] IDENTITY(1,1) NOT NULL,
[RequestDropDate] [smalldatetime] NULL,
[ShiftClass] [varchar](5) NULL,
[StartHistory] [datetime2](7) GENERATED ALWAYS AS ROW START NOT NULL,
[EndHistory] [datetime2](7) GENERATED ALWAYS AS ROW END NOT NULL,
CONSTRAINT [PK_users_WorkSchedule_id] PRIMARY KEY CLUSTERED
(
[id] ASC
)
WITH (PAD_INDEX = OFF,
STATISTICS_NORECOMPUTE = OFF,
IGNORE_DUP_KEY = OFF,
ALLOW_ROW_LOCKS = ON,
ALLOW_PAGE_LOCKS = ON))"""
result = DDLParser(ddl).run(group_by_type=True)
expected = {
"comments": [
"***** Object: Table [dbo].[users_WorkSchedule] Script Date: "
"9/29/2021 9:55:26 PM ******/"
],
"ddl_properties": [
{"name": "ANSI_NULLS", "value": "ON"},
{"name": "QUOTED_IDENTIFIER", "value": "ON"},
],
"domains": [],
"schemas": [],
"sequences": [],
"tables": [
{
"alter": {},
"checks": [],
"clustered_primary_key": [{"column": "[id]", "order": "ASC"}],
"columns": [
{
"check": None,
"default": None,
"name": "[id]",
"nullable": False,
"references": None,
"size": (1, 1),
"type": "[int] IDENTITY",
"unique": False,
},
{
"check": None,
"default": None,
"name": "[RequestDropDate]",
"nullable": True,
"references": None,
"size": None,
"type": "[smalldatetime]",
"unique": False,
},
{
"check": None,
"default": None,
"name": "[ShiftClass]",
"nullable": True,
"references": None,
"size": 5,
"type": "[varchar]",
"unique": False,
},
{
"check": None,
"default": None,
"generated": {
"always": True,
"as": "ROW START",
"stored": False,
},
"name": "[StartHistory]",
"nullable": False,
"references": None,
"size": 7,
"type": "[datetime2]",
"unique": False,
},
{
"check": None,
"default": None,
"generated": {"always": True, "as": "ROW END", "stored": False},
"name": "[EndHistory]",
"nullable": False,
"references": None,
"size": 7,
"type": "[datetime2]",
"unique": False,
},
],
"constraints": {
"primary_keys": [
{
"columns": ["[id]"],
"constraint_name": "[PK_users_WorkSchedule_id]",
}
]
},
"index": [],
"partitioned_by": [],
"primary_key": ["[id]"],
"schema": "[dbo]",
"table_name": "[users_WorkSchedule]",
"tablespace": None,
"with": {
"on": None,
"properties": [
{"name": "PAD_INDEX", "value": "OFF"},
{"name": "STATISTICS_NORECOMPUTE", "value": "OFF"},
{"name": "IGNORE_DUP_KEY", "value": "OFF"},
{"name": "ALLOW_ROW_LOCKS", "value": "ON"},
{"name": "ALLOW_PAGE_LOCKS", "value": "ON"},
],
},
}
],
"types": [],
}
assert expected == result
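

# WITH (...) followed by ON [PRIMARY] after the clustered PK: the filegroup is
# exposed through the table-level 'on' key.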
def test_with_on():
expected = {
"ddl_properties": [],
"domains": [],
"schemas": [],
"sequences": [],
"tables": [
{
"alter": {},
"checks": [],
"clustered_primary_key": [{"column": "[id]", "order": "ASC"}],
"columns": [
{
"check": None,
"default": None,
"name": "[id]",
"nullable": False,
"references": None,
"size": (1, 1),
"type": "[int] IDENTITY",
"unique": False,
},
{
"check": None,
"default": None,
"name": "[RequestDropDate]",
"nullable": True,
"references": None,
"size": None,
"type": "[smalldatetime]",
"unique": False,
},
{
"check": None,
"default": None,
"name": "[ShiftClass]",
"nullable": True,
"references": None,
"size": 5,
"type": "[varchar]",
"unique": False,
},
{
"check": None,
"default": None,
"generated": {
"always": True,
"as": "ROW START",
"stored": False,
},
"name": "[StartHistory]",
"nullable": False,
"references": None,
"size": 7,
"type": "[datetime2]",
"unique": False,
},
{
"check": None,
"default": None,
"generated": {"always": True, "as": "ROW END", "stored": False},
"name": "[EndHistory]",
"nullable": False,
"references": None,
"size": 7,
"type": "[datetime2]",
"unique": False,
},
],
"constraints": {
"primary_keys": [
{
"columns": ["[id]"],
"constraint_name": "[PK_users_WorkSchedule_id]",
}
]
},
"index": [],
"on": "[PRIMARY]",
"partitioned_by": [],
"primary_key": ["[id]"],
"schema": "[dbo]",
"table_name": "[users_WorkSchedule]",
"tablespace": None,
"with": {
"on": None,
"properties": [
{"name": "PAD_INDEX", "value": "OFF"},
{"name": "STATISTICS_NORECOMPUTE", "value": "OFF"},
{"name": "IGNORE_DUP_KEY", "value": "OFF"},
{"name": "ALLOW_ROW_LOCKS", "value": "ON"},
{"name": "ALLOW_PAGE_LOCKS", "value": "ON"},
],
},
}
],
"types": [],
}
ddl = """CREATE TABLE [dbo].[users_WorkSchedule](
[id] [int] IDENTITY(1,1) NOT NULL,
[RequestDropDate] [smalldatetime] NULL,
[ShiftClass] [varchar](5) NULL,
[StartHistory] [datetime2](7) GENERATED ALWAYS AS ROW START NOT NULL,
[EndHistory] [datetime2](7) GENERATED ALWAYS AS ROW END NOT NULL,
CONSTRAINT [PK_users_WorkSchedule_id] PRIMARY KEY CLUSTERED
(
[id] ASC
)
WITH (
PAD_INDEX = OFF,
STATISTICS_NORECOMPUTE = OFF,
IGNORE_DUP_KEY = OFF,
ALLOW_ROW_LOCKS = ON,
ALLOW_PAGE_LOCKS = ON
) ON [PRIMARY]
)"""
result = DDLParser(ddl).run(group_by_type=True)
assert expected == result
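

# PERIOD FOR SYSTEM_TIME (start, end) is returned as the table's
# 'period_for_system_time' column pair.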
def test_period_for_system_time():
ddl = """CREATE TABLE [dbo].[users_WorkSchedule](
[id] [int] IDENTITY(1,1) NOT NULL,
[RequestDropDate] [smalldatetime] NULL,
[ShiftClass] [varchar](5) NULL,
[StartHistory] [datetime2](7) GENERATED ALWAYS AS ROW START NOT NULL,
[EndHistory] [datetime2](7) GENERATED ALWAYS AS ROW END NOT NULL,
CONSTRAINT [PK_users_WorkSchedule_id] PRIMARY KEY CLUSTERED
(
[id] ASC
)
WITH (
PAD_INDEX = OFF,
STATISTICS_NORECOMPUTE = OFF,
IGNORE_DUP_KEY = OFF,
ALLOW_ROW_LOCKS = ON,
ALLOW_PAGE_LOCKS = ON
) ON [PRIMARY],
PERIOD FOR SYSTEM_TIME ([StartHistory], [EndHistory])
)
"""
result = DDLParser(ddl).run(group_by_type=True)
expected = {
"ddl_properties": [],
"domains": [],
"schemas": [],
"sequences": [],
"tables": [
{
"alter": {},
"checks": [],
"clustered_primary_key": [{"column": "[id]", "order": "ASC"}],
"columns": [
{
"check": None,
"default": None,
"name": "[id]",
"nullable": False,
"references": None,
"size": (1, 1),
"type": "[int] IDENTITY",
"unique": False,
},
{
"check": None,
"default": None,
"name": "[RequestDropDate]",
"nullable": True,
"references": None,
"size": None,
"type": "[smalldatetime]",
"unique": False,
},
{
"check": None,
"default": None,
"name": "[ShiftClass]",
"nullable": True,
"references": None,
"size": 5,
"type": "[varchar]",
"unique": False,
},
{
"check": None,
"default": None,
"generated": {
"always": True,
"as": "ROW START",
"stored": False,
},
"name": "[StartHistory]",
"nullable": False,
"references": None,
"size": 7,
"type": "[datetime2]",
"unique": False,
},
{
"check": None,
"default": None,
"generated": {"always": True, "as": "ROW END", "stored": False},
"name": "[EndHistory]",
"nullable": False,
"references": None,
"size": 7,
"type": "[datetime2]",
"unique": False,
},
],
"constraints": {
"primary_keys": [
{
"columns": ["[id]"],
"constraint_name": "[PK_users_WorkSchedule_id]",
}
]
},
"index": [],
"on": "[PRIMARY]",
"partitioned_by": [],
"period_for_system_time": ["[StartHistory]", "[EndHistory]"],
"primary_key": ["[id]"],
"schema": "[dbo]",
"table_name": "[users_WorkSchedule]",
"tablespace": None,
"with": {
"on": None,
"properties": [
{"name": "PAD_INDEX", "value": "OFF"},
{"name": "STATISTICS_NORECOMPUTE", "value": "OFF"},
{"name": "IGNORE_DUP_KEY", "value": "OFF"},
{"name": "ALLOW_ROW_LOCKS", "value": "ON"},
{"name": "ALLOW_PAGE_LOCKS", "value": "ON"},
],
},
}
],
"types": [],
}
assert expected == result
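

# A trailing ON [PRIMARY] at table level is parsed in addition to the ON
# clause of the primary key constraint.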
def test_on_primary_on_table_level():
ddl = """CREATE TABLE [dbo].[users_WorkSchedule](
[id] [int] IDENTITY(1,1) NOT NULL,
[RequestDropDate] [smalldatetime] NULL,
[ShiftClass] [varchar](5) NULL,
[StartHistory] [datetime2](7) GENERATED ALWAYS AS ROW START NOT NULL,
[EndHistory] [datetime2](7) GENERATED ALWAYS AS ROW END NOT NULL,
CONSTRAINT [PK_users_WorkSchedule_id] PRIMARY KEY CLUSTERED
(
[id] ASC
)
WITH (
PAD_INDEX = OFF,
STATISTICS_NORECOMPUTE = OFF,
IGNORE_DUP_KEY = OFF,
ALLOW_ROW_LOCKS = ON,
ALLOW_PAGE_LOCKS = ON
) ON [PRIMARY],
PERIOD FOR SYSTEM_TIME ([StartHistory], [EndHistory])
)) ON [PRIMARY]
"""
result = DDLParser(ddl).run(group_by_type=True)
expected = {
"ddl_properties": [],
"domains": [],
"schemas": [],
"sequences": [],
"tables": [
{
"alter": {},
"checks": [],
"clustered_primary_key": [{"column": "[id]", "order": "ASC"}],
"columns": [
{
"check": None,
"default": None,
"name": "[id]",
"nullable": False,
"references": None,
"size": (1, 1),
"type": "[int] IDENTITY",
"unique": False,
},
{
"check": None,
"default": None,
"name": "[RequestDropDate]",
"nullable": True,
"references": None,
"size": None,
"type": "[smalldatetime]",
"unique": False,
},
{
"check": None,
"default": None,
"name": "[ShiftClass]",
"nullable": True,
"references": None,
"size": 5,
"type": "[varchar]",
"unique": False,
},
{
"check": None,
"default": None,
"generated": {
"always": True,
"as": "ROW START",
"stored": False,
},
"name": "[StartHistory]",
"nullable": False,
"references": None,
"size": 7,
"type": "[datetime2]",
"unique": False,
},
{
"check": None,
"default": None,
"generated": {"always": True, "as": "ROW END", "stored": False},
"name": "[EndHistory]",
"nullable": False,
"references": None,
"size": 7,
"type": "[datetime2]",
"unique": False,
},
],
"constraints": {
"primary_keys": [
{
"columns": ["[id]"],
"constraint_name": "[PK_users_WorkSchedule_id]",
}
]
},
"index": [],
"on": "[PRIMARY]",
"partitioned_by": [],
"period_for_system_time": ["[StartHistory]", "[EndHistory]"],
"primary_key": ["[id]"],
"schema": "[dbo]",
"table_name": "[users_WorkSchedule]",
"tablespace": None,
"with": {
"on": None,
"properties": [
{"name": "PAD_INDEX", "value": "OFF"},
{"name": "STATISTICS_NORECOMPUTE", "value": "OFF"},
{"name": "IGNORE_DUP_KEY", "value": "OFF"},
{"name": "ALLOW_ROW_LOCKS", "value": "ON"},
{"name": "ALLOW_PAGE_LOCKS", "value": "ON"},
],
},
}
],
"types": [],
}
assert expected == result
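

# A table-level WITH (SYSTEM_VERSIONING = ON) after ON [PRIMARY] is picked up
# as the table's 'with' properties.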
def test_with_on_table_level():
ddl = """USE [mystaffonline]
GO
/****** Object: Table [dbo].[users_WorkSchedule] Script Date: 9/29/2021 9:55:26 PM ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
CREATE TABLE [dbo].[users_WorkSchedule](
[id] [int] IDENTITY(1,1) NOT NULL,
[user_id] [int] NULL,
CONSTRAINT [PK_users_WorkSchedule_id] PRIMARY KEY CLUSTERED
(
[id] ASC
) WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF,
ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY],
PERIOD FOR SYSTEM_TIME ([StartHistory], [EndHistory])
) ON [PRIMARY]
WITH
(
SYSTEM_VERSIONING = ON
)"""
result = DDLParser(ddl).run(group_by_type=True)
expected = {
"comments": [
"***** Object: Table [dbo].[users_WorkSchedule] Script Date: "
"9/29/2021 9:55:26 PM ******/"
],
"ddl_properties": [
{"name": "ANSI_NULLS", "value": "ON"},
{"name": "QUOTED_IDENTIFIER", "value": "ON"},
],
"domains": [],
"schemas": [],
"sequences": [],
"tables": [
{
"alter": {},
"checks": [],
"clustered_primary_key": [{"column": "[id]", "order": "ASC"}],
"columns": [
{
"check": None,
"default": None,
"name": "[id]",
"nullable": False,
"references": None,
"size": (1, 1),
"type": "[int] IDENTITY",
"unique": False,
},
{
"check": None,
"default": None,
"name": "[user_id]",
"nullable": True,
"references": None,
"size": None,
"type": "[int]",
"unique": False,
},
],
"constraints": {
"primary_keys": [
{
"columns": ["[id]"],
"constraint_name": "[PK_users_WorkSchedule_id]",
}
]
},
"index": [],
"on": "[PRIMARY]",
"partitioned_by": [],
"period_for_system_time": ["[StartHistory]", "[EndHistory]"],
"primary_key": ["[id]"],
"schema": "[dbo]",
"table_name": "[users_WorkSchedule]",
"tablespace": None,
"with": {
"on": None,
"properties": [{"name": "SYSTEM_VERSIONING", "value": "ON"}],
},
}
],
"types": [],
}
assert expected == result
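

# A WITH property that carries its own nested options, e.g.
# SYSTEM_VERSIONING = ON (HISTORY_TABLE = ...), is parsed as nested properties.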
def test_with_on_with_properties():
ddl = """USE [mystaffonline]
GO
/****** Object: Table [dbo].[users_WorkSchedule] Script Date: 9/29/2021 9:55:26 PM ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
CREATE TABLE [dbo].[users_WorkSchedule](
[id] [int] IDENTITY(1,1) NOT NULL,
[user_id] [int] NULL,
[station_Id] [int] NULL,
CONSTRAINT [PK_users_WorkSchedule_id] PRIMARY KEY CLUSTERED
(
[id] ASC
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF,
ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY],
PERIOD FOR SYSTEM_TIME ([StartHistory], [EndHistory])
) ON [PRIMARY]
WITH
(
SYSTEM_VERSIONING = ON ( HISTORY_TABLE = [dbo].[users_WorkScheduleHistory] )
)"""
result = DDLParser(ddl).run(group_by_type=True)
expected = {
"comments": [
"***** Object: Table [dbo].[users_WorkSchedule] Script Date: "
"9/29/2021 9:55:26 PM ******/"
],
"ddl_properties": [
{"name": "ANSI_NULLS", "value": "ON"},
{"name": "QUOTED_IDENTIFIER", "value": "ON"},
],
"domains": [],
"schemas": [],
"sequences": [],
"tables": [
{
"alter": {},
"checks": [],
"clustered_primary_key": [{"column": "[id]", "order": "ASC"}],
"columns": [
{
"check": None,
"default": None,
"name": "[id]",
"nullable": False,
"references": None,
"size": (1, 1),
"type": "[int] IDENTITY",
"unique": False,
},
{
"check": None,
"default": None,
"name": "[user_id]",
"nullable": True,
"references": None,
"size": None,
"type": "[int]",
"unique": False,
},
{
"check": None,
"default": None,
"name": "[station_Id]",
"nullable": True,
"references": None,
"size": None,
"type": "[int]",
"unique": False,
},
],
"constraints": {
"primary_keys": [
{
"columns": ["[id]"],
"constraint_name": "[PK_users_WorkSchedule_id]",
}
]
},
"index": [],
"on": "[PRIMARY]",
"partitioned_by": [],
"period_for_system_time": ["[StartHistory]", "[EndHistory]"],
"primary_key": ["[id]"],
"schema": "[dbo]",
"table_name": "[users_WorkSchedule]",
"tablespace": None,
"with": {
"on": None,
"properties": [
{"name": "SYSTEM_VERSIONING", "value": "ON"},
{
"properties": [
{
"name": "HISTORY_TABLE",
"value": "[dbo].[users_WorkScheduleHistory]",
}
]
},
],
},
}
],
"types": [],
}
assert expected == result
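

# Several CREATE TABLE statements separated by GO are parsed into separate
# tables, and TEXTIMAGE_ON [PRIMARY] is captured per table.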
def test_output_separated_by_go_and_textimage():
ddl = """/****** Object: Table [dbo].[TO_Requests] Script Date: 9/29/2021 9:55:26 PM ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
CREATE TABLE [dbo].[TO_Requests](
[Request_ID] [int] IDENTITY(1,1) NOT NULL,
[user_id] [int] NULL,
[date_from] [smalldatetime] NULL,
[date_to] [smalldatetime] NULL,
[comments] [varchar](2000) NULL,
[status] [varchar](10) NOT NULL,
[profile_id] [int] NULL,
[DateAdded] [smalldatetime] NOT NULL,
[UpdatedBy] [int] NULL,
[UpdatedOn] [smalldatetime] NULL,
[InitialResponseBy] [int] NULL,
[InitialResponseDate] [smalldatetime] NULL,
[InitialResponseStatus] [varchar](10) NULL,
[adpRequestId] [varchar](50) NULL,
CONSTRAINT [PK_TO_Requests_Request_ID] PRIMARY KEY CLUSTERED
(
[Request_ID] ASC
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF,
ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY]
) ON [PRIMARY]
GO
/****** Object: Table [dbo].[ToDo] Script Date: 9/29/2021 9:55:26 PM ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
CREATE TABLE [dbo].[ToDo](
[ID] [int] IDENTITY(1,1) NOT NULL,
[ProfileID] [int] NOT NULL,
[Title] [varchar](50) NULL,
CONSTRAINT [PK_ToDo_ID] PRIMARY KEY CLUSTERED
(
[ID] ASC
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF,
ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY]
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
GO
/****** Object: Table [dbo].[ToDoComments] Script Date: 9/29/2021 9:55:26 PM ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
CREATE TABLE [dbo].[ToDoComments](
[ToDoCommentsId] [int] IDENTITY(1,1) NOT NULL,
[CreatedBy] [int] NOT NULL,
CONSTRAINT [PK_ToDoComments_ToDoCommentsId] PRIMARY KEY CLUSTERED
(
[ToDoCommentsId] ASC
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF,
ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY]
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
GO"""
result = DDLParser(ddl).run(group_by_type=True)
expected = {
"comments": [
"***** Object: Table [dbo].[TO_Requests] Script Date: "
"9/29/2021 9:55:26 PM ******/"
],
"ddl_properties": [
{"name": "ANSI_NULLS", "value": "ON"},
{"name": "QUOTED_IDENTIFIER", "value": "ON"},
{"name": "ANSI_NULLS", "value": "ON"},
{"name": "QUOTED_IDENTIFIER", "value": "ON"},
{"name": "ANSI_NULLS", "value": "ON"},
{"name": "QUOTED_IDENTIFIER", "value": "ON"},
],
"domains": [],
"schemas": [],
"sequences": [],
"tables": [
{
"alter": {},
"checks": [],
"clustered_primary_key": [{"column": "[Request_ID]", "order": "ASC"}],
"columns": [
{
"check": None,
"default": None,
"name": "[Request_ID]",
"nullable": False,
"references": None,
"size": (1, 1),
"type": "[int] IDENTITY",
"unique": False,
},
{
"check": None,
"default": None,
"name": "[user_id]",
"nullable": True,
"references": None,
"size": None,
"type": "[int]",
"unique": False,
},
{
"check": None,
"default": None,
"name": "[date_from]",
"nullable": True,
"references": None,
"size": None,
"type": "[smalldatetime]",
"unique": False,
},
{
"check": None,
"default": None,
"name": "[date_to]",
"nullable": True,
"references": None,
"size": None,
"type": "[smalldatetime]",
"unique": False,
},
{
"check": None,
"default": None,
"name": "[comments]",
"nullable": True,
"references": None,
"size": 2000,
"type": "[varchar]",
"unique": False,
},
{
"check": None,
"default": None,
"name": "[status]",
"nullable": False,
"references": None,
"size": 10,
"type": "[varchar]",
"unique": False,
},
{
"check": None,
"default": None,
"name": "[profile_id]",
"nullable": True,
"references": None,
"size": None,
"type": "[int]",
"unique": False,
},
{
"check": None,
"default": None,
"name": "[DateAdded]",
"nullable": False,
"references": None,
"size": None,
"type": "[smalldatetime]",
"unique": False,
},
{
"check": None,
"default": None,
"name": "[UpdatedBy]",
"nullable": True,
"references": None,
"size": None,
"type": "[int]",
"unique": False,
},
{
"check": None,
"default": None,
"name": "[UpdatedOn]",
"nullable": True,
"references": None,
"size": None,
"type": "[smalldatetime]",
"unique": False,
},
{
"check": None,
"default": None,
"name": "[InitialResponseBy]",
"nullable": True,
"references": None,
"size": None,
"type": "[int]",
"unique": False,
},
{
"check": None,
"default": None,
"name": "[InitialResponseDate]",
"nullable": True,
"references": None,
"size": None,
"type": "[smalldatetime]",
"unique": False,
},
{
"check": None,
"default": None,
"name": "[InitialResponseStatus]",
"nullable": True,
"references": None,
"size": 10,
"type": "[varchar]",
"unique": False,
},
{
"check": None,
"default": None,
"name": "[adpRequestId]",
"nullable": True,
"references": None,
"size": 50,
"type": "[varchar]",
"unique": False,
},
],
"constraints": {
"primary_keys": [
{
"columns": ["[Request_ID]"],
"constraint_name": "[PK_TO_Requests_Request_ID]",
}
]
},
"index": [],
"on": "[PRIMARY]",
"partitioned_by": [],
"primary_key": ["[Request_ID]"],
"schema": "[dbo]",
"table_name": "[TO_Requests]",
"tablespace": None,
"with": {
"on": None,
"properties": [
{"name": "PAD_INDEX", "value": "OFF"},
{"name": "STATISTICS_NORECOMPUTE", "value": "OFF"},
{"name": "IGNORE_DUP_KEY", "value": "OFF"},
{"name": "ALLOW_ROW_LOCKS", "value": "ON"},
{"name": "ALLOW_PAGE_LOCKS", "value": "ON"},
],
},
},
{
"alter": {},
"checks": [],
"clustered_primary_key": [{"column": "[ID]", "order": "ASC"}],
"columns": [
{
"check": None,
"default": None,
"name": "[ID]",
"nullable": False,
"references": None,
"size": (1, 1),
"type": "[int] IDENTITY",
"unique": False,
},
{
"check": None,
"default": None,
"name": "[ProfileID]",
"nullable": False,
"references": None,
"size": None,
"type": "[int]",
"unique": False,
},
{
"check": None,
"default": None,
"name": "[Title]",
"nullable": True,
"references": None,
"size": 50,
"type": "[varchar]",
"unique": False,
},
],
"constraints": {
"primary_keys": [
{"columns": ["[ID]"], "constraint_name": "[PK_ToDo_ID]"}
]
},
"index": [],
"on": "[PRIMARY]",
"partitioned_by": [],
"primary_key": ["[ID]"],
"schema": "[dbo]",
"table_name": "[ToDo]",
"tablespace": None,
"textimage_on": "[PRIMARY]",
"with": {
"on": None,
"properties": [
{"name": "PAD_INDEX", "value": "OFF"},
{"name": "STATISTICS_NORECOMPUTE", "value": "OFF"},
{"name": "IGNORE_DUP_KEY", "value": "OFF"},
{"name": "ALLOW_ROW_LOCKS", "value": "ON"},
{"name": "ALLOW_PAGE_LOCKS", "value": "ON"},
],
},
},
{
"alter": {},
"checks": [],
"clustered_primary_key": [
{"column": "[ToDoCommentsId]", "order": "ASC"}
],
"columns": [
{
"check": None,
"default": None,
"name": "[ToDoCommentsId]",
"nullable": False,
"references": None,
"size": (1, 1),
"type": "[int] IDENTITY",
"unique": False,
},
{
"check": None,
"default": None,
"name": "[CreatedBy]",
"nullable": False,
"references": None,
"size": None,
"type": "[int]",
"unique": False,
},
],
"constraints": {
"primary_keys": [
{
"columns": ["[ToDoCommentsId]"],
"constraint_name": "[PK_ToDoComments_ToDoCommentsId]",
}
]
},
"index": [],
"on": "[PRIMARY]",
"partitioned_by": [],
"primary_key": ["[ToDoCommentsId]"],
"schema": "[dbo]",
"table_name": "[ToDoComments]",
"tablespace": None,
"textimage_on": "[PRIMARY]",
"with": {
"on": None,
"properties": [
{"name": "PAD_INDEX", "value": "OFF"},
{"name": "STATISTICS_NORECOMPUTE", "value": "OFF"},
{"name": "IGNORE_DUP_KEY", "value": "OFF"},
{"name": "ALLOW_ROW_LOCKS", "value": "ON"},
{"name": "ALLOW_PAGE_LOCKS", "value": "ON"},
],
},
},
],
"types": [],
}
assert expected == result
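

# DEFAULT NEXT VALUE FOR [schema].[sequence] becomes a structured default:
# {'next_value_for': ...}.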
def test_next_value_for():
ddl = """CREATE TABLE [dbo].[SLIPEVENTO] (
[cdSLIP] [bigint] NOT NULL
DEFAULT NEXT VALUE FOR [dbo].[sqCdSLIPEvt] )"""
result = DDLParser(ddl).run(group_by_type=True)
expected = {
"ddl_properties": [],
"domains": [],
"schemas": [],
"sequences": [],
"tables": [
{
"alter": {},
"checks": [],
"columns": [
{
"check": None,
"default": {"next_value_for": "[dbo].[sqCdSLIPEvt]"},
"name": "[cdSLIP]",
"nullable": False,
"references": None,
"size": None,
"type": "[bigint]",
"unique": False,
}
],
"index": [],
"partitioned_by": [],
"primary_key": [],
"schema": "[dbo]",
"table_name": "[SLIPEVENTO]",
"tablespace": None,
}
],
"types": [],
}
assert expected == result
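

# An unnamed PRIMARY KEY CLUSTERED with mixed ASC/DESC column order and a
# FILLFACTOR option in the WITH clause.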
def test_primary_key_clustered():
ddl = """
USE [sasgolddevdb]
GO
/****** Object: Table [aud].[tcal_tgt] Script Date: 11/11/2021 11:18:41 AM ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
CREATE TABLE [aud].[tcal_tgt](
[TCAL_SID] [decimal](30, 0) NOT NULL,
[TERM_YR] [varchar](4) NULL,
[REC_DELETED_BY] [varchar](25) NULL,
[SYSTEM_OPERATION] [varchar](1) NULL,
PRIMARY KEY CLUSTERED
(
[TCAL_SID] ASC, [TERM_YR] DESC
) WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF,
IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, FILLFACTOR = 80) ON [PRIMARY]
) ON [PRIMARY]
GO"""
result = DDLParser(ddl).run(group_by_type=True)
expected = {
"comments": [
"***** Object: Table [aud].[tcal_tgt] Script Date: "
"11/11/2021 11:18:41 AM ******/"
],
"ddl_properties": [
{"name": "ANSI_NULLS", "value": "ON"},
{"name": "QUOTED_IDENTIFIER", "value": "ON"},
],
"domains": [],
"schemas": [],
"sequences": [],
"tables": [
{
"alter": {},
"checks": [],
"columns": [
{
"check": None,
"default": None,
"name": "[TCAL_SID]",
"nullable": False,
"references": None,
"size": (30, 0),
"type": "[decimal]",
"unique": False,
},
{
"check": None,
"default": None,
"name": "[TERM_YR]",
"nullable": False,
"references": None,
"size": 4,
"type": "[varchar]",
"unique": False,
},
{
"check": None,
"default": None,
"name": "[REC_DELETED_BY]",
"nullable": True,
"references": None,
"size": 25,
"type": "[varchar]",
"unique": False,
},
{
"check": None,
"default": None,
"name": "[SYSTEM_OPERATION]",
"nullable": True,
"references": None,
"size": 1,
"type": "[varchar]",
"unique": False,
},
],
"index": [],
"on": "[PRIMARY]",
"partitioned_by": [],
"primary_key": ["[TCAL_SID]", "[TERM_YR]"],
"schema": "[aud]",
"table_name": "[tcal_tgt]",
"tablespace": None,
"clustered_primary_key": [
{"column": "[TCAL_SID]", "order": "ASC"},
{"column": "[TERM_YR]", "order": "DESC"},
],
"with": {
"on": None,
"properties": [
{"name": "PAD_INDEX", "value": "OFF"},
{"name": "STATISTICS_NORECOMPUTE", "value": "OFF"},
{"name": "IGNORE_DUP_KEY", "value": "OFF"},
{"name": "ALLOW_ROW_LOCKS", "value": "ON"},
{"name": "ALLOW_PAGE_LOCKS", "value": "ON"},
{"name": "FILLFACTOR", "value": "80"},
],
},
}
],
"types": [],
}
assert expected == result

# --- ukpocopy/__init__.py (repo: sephioh/ukpocopy, MIT license) ---

from ukpocopy import postcodes
from ukpocopy import validators
from ukpocopy import exceptions

# --- App(BE)/main/api/unit.py (repo: osamhack2021/AI_APP_handylib_devlib, MIT license) ---

# Unit blueprint
from flask import Blueprint, json, Response
from main.models import database
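
# The error messages below are in Korean; English translations:
#   '부대가 없거나 올바른 부대를 입력해주세요.' -> "No such unit, or please enter a valid unit name."
#   '동일한 부대 데이터가 2개이상 존재합니다.' -> "Two or more records exist for the same unit."
#   '대여된 책입니다.' / '반납된 책입니다.' -> "The book is already borrowed." / "The book has already been returned."
#   '... 다시 한번 확인해주세요.' -> "Please double-check ..." (isbn / user_id / Unit_name).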
unit_page = Blueprint('unit', __name__)
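
# GET /: list the distinct unit names found in the unittest collection.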
@unit_page.route('/',methods=['GET'])
def unit_name():
jsl = {'Unit_name': database.client.API_test.unittest.distinct('name') }
resultJson = json.dumps(jsl, ensure_ascii=False)
return Response(resultJson,mimetype="application/json",status=200)
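
# GET /<Unit_name>&<page>: paginated (18 per page) list of a unit's books,
# each joined with its details from the book collection.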
@unit_page.route('/<Unit_name>&<page>', methods=['GET'])
def Unit_books_list(Unit_name, page):
page = int(page)
if page <= 1:
page = 0
else :
page = page - 1
c = 0
for i in database.client.API_test.unittest.distinct('name'):
if i == '{}'.format(Unit_name):
c += 1
if c == 1 :
a = list (database.client.API_test.unittest.find({'name': '{}'.format(Unit_name)}).skip(page*18).limit(18))
for i in range(0,len(a)):
a[i].update(database.client.API_test.book.find({'isbn': a[i]['isbn']})[0])
del (a[i]['_id'])
result = {'name': '{}'.format(Unit_name), 'books_list': a}
resultJson = json.dumps(result, ensure_ascii=False)
return Response(resultJson,mimetype="application/json",status=200)
else :
if c == 0 :
jsl = {'Error': '부대가 없거나 올바른 부대를 입력해주세요.'}
resultJson = json.dumps(jsl, ensure_ascii=False)
return Response(resultJson,mimetype="application/json",status=201)
else :
jsl = {'Error': '동일한 부대 데이터가 2개이상 존재합니다.'}
resultJson = json.dumps(jsl, ensure_ascii=False)
return Response(resultJson,mimetype="application/json",status=201)
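
# GET /best/Unit_name=<Unit_name>: the unit's ten highest-scored books.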
@unit_page.route('/best/Unit_name=<Unit_name>', methods=['GET'])
def Unit_books_best(Unit_name):
c = 0
for i in database.client.API_test.unittest.distinct('name'):
if i == '{}'.format(Unit_name):
c += 1
if c == 1 :
a = list (database.client.API_test.unittest.find({'name': '{}'.format(Unit_name)}).sort('score',-1).limit(10))
for i in range(0,len(a)):
a[i].update(database.client.API_test.book.find({'isbn': a[i]['isbn']})[0])
del (a[i]['_id'])
result = {'name': '{}'.format(Unit_name), 'books_list': a}
resultJson = json.dumps(result, ensure_ascii=False)
return Response(resultJson,mimetype="application/json",status=200)
else :
if c == 0 :
jsl = {'Error': '부대가 없거나 올바른 부대를 입력해주세요.'}
resultJson = json.dumps(jsl, ensure_ascii=False)
return Response(resultJson,mimetype="application/json",status=201)
else :
jsl = {'Error': '동일한 부대 데이터가 2개이상 존재합니다.'}
resultJson = json.dumps(jsl, ensure_ascii=False)
return Response(resultJson,mimetype="application/json",status=201)
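
# GET /new/Unit_name=<Unit_name>: ten of the unit's books sorted by ascending score.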
@unit_page.route('/new/Unit_name=<Unit_name>', methods=['GET'])
def Unit_books_new(Unit_name):
c = 0
for i in database.client.API_test.unittest.distinct('name'):
if i == '{}'.format(Unit_name):
c += 1
if c == 1 :
a = list (database.client.API_test.unittest.find({'name': '{}'.format(Unit_name)}).sort('score',1).limit(10))
for i in range(0,len(a)):
a[i].update(database.client.API_test.book.find({'isbn': a[i]['isbn']})[0])
del (a[i]['_id'])
result = {'name': '{}'.format(Unit_name), 'books_list': a}
resultJson = json.dumps(result, ensure_ascii=False)
return Response(resultJson,mimetype="application/json",status=200)
else :
if c == 0 :
jsl = {'Error': '부대가 없거나 올바른 부대를 입력해주세요.'}
resultJson = json.dumps(jsl, ensure_ascii=False)
return Response(resultJson,mimetype="application/json",status=201)
else :
jsl = {'Error': '동일한 부대 데이터가 2개이상 존재합니다.'}
resultJson = json.dumps(jsl, ensure_ascii=False)
return Response(resultJson,mimetype="application/json",status=201)
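
# GET /chk/...&isbn=<isbn>: look up a unit's books by ISBN; responds with
# status 201 and an error payload when nothing matches.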
@unit_page.route('/chk/Unit_name=<Unit_name>&isbn=<isbn>', methods=['GET'])
def Unit_books_list_chk_isbn(Unit_name, isbn):
c = 0
for i in database.client.API_test.unittest.distinct('name'):
if i == '{}'.format(Unit_name):
c += 1
if c == 1 :
a = list (database.client.API_test.unittest.find({'$and':[{'name': '{}'.format(Unit_name)},{'isbn': '{}'.format(isbn)}]}))
for i in range(0,len(a)):
a[i].update(database.client.API_test.book.find({'isbn': a[i]['isbn']})[0])
del (a[i]['_id'])
if a == []:
result = {'Error': 'isbn를 다시 한번 확인해주세요.'}
resultJson = json.dumps(result, ensure_ascii=False)
return Response(resultJson,mimetype="application/json",status=201)
else :
result = {'name': '{}'.format(Unit_name), 'books_list': a}
resultJson = json.dumps(result, ensure_ascii=False)
return Response(resultJson,mimetype="application/json",status=200)
else :
if c == 0 :
jsl = {'Error': '부대가 없거나 올바른 부대를 입력해주세요.'}
resultJson = json.dumps(jsl, ensure_ascii=False)
return Response(resultJson,mimetype="application/json",status=201)
else :
jsl = {'Error': '동일한 부대 데이터가 2개이상 존재합니다.'}
resultJson = json.dumps(jsl, ensure_ascii=False)
return Response(resultJson,mimetype="application/json",status=201)
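
# GET /chk/...&user_id=<user_id>: books in the unit currently borrowed by the
# given user.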
@unit_page.route('/chk/Unit_name=<Unit_name>&user_id=<user_id>', methods=['GET'])
def Unit_books_list_chk_user_id(Unit_name, user_id):
c = 0
for i in database.client.API_test.unittest.distinct('name'):
if i == '{}'.format(Unit_name):
c += 1
if c == 1 :
a = list (database.client.API_test.unittest.find({'$and':[{'name': '{}'.format(Unit_name)},{'user_id': '{}'.format(user_id)}]}))
for i in range(0,len(a)):
a[i].update(database.client.API_test.book.find({'isbn': a[i]['isbn']})[0])
del (a[i]['_id'])
if a == []:
result = {'Error': '해당 사용자는 대여중인 도서가 없습니다. 또는 user_id를 다시 한번 확인해주세요.'}
resultJson = json.dumps(result, ensure_ascii=False)
return Response(resultJson,mimetype="application/json",status=201)
else :
result = {'name': '{}'.format(Unit_name), 'books_list': a}
resultJson = json.dumps(result, ensure_ascii=False)
return Response(resultJson,mimetype="application/json",status=200)
else :
if c == 0 :
jsl = {'Error': '부대가 없거나 올바른 부대를 입력해주세요.'}
resultJson = json.dumps(jsl, ensure_ascii=False)
return Response(resultJson,mimetype="application/json",status=201)
else :
jsl = {'Error': '동일한 부대 데이터가 2개이상 존재합니다.'}
resultJson = json.dumps(jsl, ensure_ascii=False)
return Response(resultJson,mimetype="application/json",status=201)
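
# POST /brr/...: borrow a copy; if it is available (state == 0) its score is
# incremented, state is set to 1 and the borrower's user_id is recorded.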
@unit_page.route('/brr/Unit_name=<Unit_name>&isbn=<isbn>&user_id=<user_id>', methods=['POST'])
def Unit_books_list_brr(Unit_name, isbn, user_id):
    c = 0
    for i in database.client.API_test.unittest.distinct('name'):
        if i == Unit_name:
            c += 1
    if c == 1:
        a = list(database.client.API_test.unittest.find({'$and': [{'name': Unit_name}, {'isbn': isbn}]}))
        for i in range(len(a)):
            if a[i]['isbn'] == isbn:
                if a[i]['state'] == 0:
                    a[i].update(database.client.API_test.book.find({'isbn': a[i]['isbn']})[0])
                    del a[i]['_id']
                    database.Unittest.objects(name=Unit_name, isbn=isbn)[0].update(score=a[i]['score'] + 1, state=1, user_id=user_id)
                    break
                else:
                    result = {'Error': 'This book is already on loan.'}
                    resultJson = json.dumps(result, ensure_ascii=False)
                    return Response(resultJson, mimetype="application/json", status=201)
        if a == []:
            result = {'Error': 'Please double-check the isbn and user_id.'}
            resultJson = json.dumps(result, ensure_ascii=False)
            return Response(resultJson, mimetype="application/json", status=201)
        else:
            a = list(database.client.API_test.unittest.find({'$and': [{'name': Unit_name}, {'isbn': isbn}]}))
            for i in range(len(a)):
                a[i].update(database.client.API_test.book.find({'isbn': a[i]['isbn']})[0])
                del a[i]['_id']
            result = {'name': Unit_name, 'books_list': a}
            resultJson = json.dumps(result, ensure_ascii=False)
            return Response(resultJson, mimetype="application/json", status=200)
    elif c == 0:
        jsl = {'Error': 'No such unit. Please enter a valid unit name.'}
        resultJson = json.dumps(jsl, ensure_ascii=False)
        return Response(resultJson, mimetype="application/json", status=201)
    else:
        jsl = {'Error': 'Two or more records exist for the same unit.'}
        resultJson = json.dumps(jsl, ensure_ascii=False)
        return Response(resultJson, mimetype="application/json", status=201)
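# POST /ret: return a copy -- reset state to 0 and clear the borrower.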
@unit_page.route('/ret/Unit_name=<Unit_name>&isbn=<isbn>&user_id=<user_id>', methods=['POST'])
def Unit_books_list_ret(Unit_name, isbn, user_id):
    c = 0
    for i in database.client.API_test.unittest.distinct('name'):
        if i == Unit_name:
            c += 1
    if c == 1:
        a = list(database.client.API_test.unittest.find({'$and': [{'name': Unit_name}, {'isbn': isbn}]}))
        for i in range(len(a)):
            if a[i]['isbn'] == isbn:
                if a[i]['state'] == 1:
                    a[i].update(database.client.API_test.book.find({'isbn': a[i]['isbn']})[0])
                    del a[i]['_id']
                    database.Unittest.objects(name=Unit_name, isbn=isbn)[0].update(state=0, user_id='')
                    break
                else:
                    result = {'Error': 'This book has already been returned.'}
                    resultJson = json.dumps(result, ensure_ascii=False)
                    return Response(resultJson, mimetype="application/json", status=201)
        if a == []:
            result = {'Error': 'Please double-check the Unit_name, isbn, and user_id.'}
            resultJson = json.dumps(result, ensure_ascii=False)
            return Response(resultJson, mimetype="application/json", status=201)
        else:
            a = list(database.client.API_test.unittest.find({'$and': [{'name': Unit_name}, {'isbn': isbn}]}))
            for i in range(len(a)):
                a[i].update(database.client.API_test.book.find({'isbn': a[i]['isbn']})[0])
                del a[i]['_id']
            result = {'name': Unit_name, 'books_list': a}
            resultJson = json.dumps(result, ensure_ascii=False)
            return Response(resultJson, mimetype="application/json", status=200)
    elif c == 0:
        jsl = {'Error': 'No such unit. Please enter a valid unit name.'}
        resultJson = json.dumps(jsl, ensure_ascii=False)
        return Response(resultJson, mimetype="application/json", status=201)
    else:
        jsl = {'Error': 'Two or more records exist for the same unit.'}
        resultJson = json.dumps(jsl, ensure_ascii=False)
        return Response(resultJson, mimetype="application/json", status=201)
| 49.79476
| 167
| 0.577304
| 1,453
| 11,403
| 4.414315
| 0.066758
| 0.061116
| 0.082944
| 0.09604
| 0.944185
| 0.937636
| 0.924228
| 0.908793
| 0.908793
| 0.896944
| 0
| 0.018313
| 0.24818
| 11,403
| 229
| 168
| 49.79476
| 0.72985
| 0.001228
| 0
| 0.825688
| 0
| 0
| 0.153569
| 0.021688
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036697
| false
| 0
| 0.009174
| 0
| 0.174312
| 0.009174
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b5e0898aac1d25243c26ebcca86577a44a2fb47b
| 2,785
|
py
|
Python
|
test/test_symmetries.py
|
tud-ccc/mocasin
|
6cf0a169e24d65d0fc859398f181dd500f928340
|
[
"0BSD"
] | 1
|
2022-03-13T19:27:50.000Z
|
2022-03-13T19:27:50.000Z
|
test/test_symmetries.py
|
tud-ccc/mocasin
|
6cf0a169e24d65d0fc859398f181dd500f928340
|
[
"0BSD"
] | null | null | null |
test/test_symmetries.py
|
tud-ccc/mocasin
|
6cf0a169e24d65d0fc859398f181dd500f928340
|
[
"0BSD"
] | null | null | null |
# Copyright (C) 2020 TU Dresden
# Licensed under the ISC license (see LICENSE.txt)
#
# Authors: Felix Teweleit, Andres Goens
import subprocess
import pytest
import filecmp
import os
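# Both tests below run the same genetic mapping twice -- once with the
# mpsym-backed Symmetries representation and once with the pure-Python
# fallback (representation.disable_mpsym=true) -- and check that the two
# runs report the same best mapping time.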
@pytest.mark.skip(reason="Test just takes too long.")
def test_symmetries_tgff_large(datadir, large_platform):
subprocess.check_call(
[
"mocasin",
"generate_mapping",
"graph=tgff_reader",
"tgff.directory=tgff/e3s-0.9",
"tgff.file=auto-indust-cords.tgff",
f"platform={large_platform}",
"representation=Symmetries",
"mapper=genetic",
f"outdir={datadir}/mpsym/",
f"platform.symmetries_json=platforms/symmetries/{large_platform}.json",
"trace=tgff_reader",
],
cwd=datadir,
)
subprocess.check_call(
[
"mocasin",
"generate_mapping",
"graph=tgff_reader",
"tgff.directory=tgff/e3s-0.9",
"tgff.file=auto-indust-cords.tgff",
f"platform={large_platform}",
"representation=Symmetries",
"representation.disable_mpsym=true",
"mapper=genetic",
f"outdir={datadir}/python/",
"trace=tgff_reader",
],
cwd=datadir,
)
assert filecmp.cmp(
os.path.join(datadir, "mpsym/best_time.txt"),
os.path.join(datadir, "python/best_time.txt"),
shallow=False,
)
def test_symmetries_tgff_small(datadir, small_platform):
subprocess.check_call(
[
"mocasin",
"generate_mapping",
"graph=tgff_reader",
"tgff.directory=tgff/e3s-0.9",
"tgff.file=auto-indust-cords.tgff",
f"platform={small_platform}",
"representation=Symmetries",
"mapper=genetic",
"mapper.random_seed=42",
f"outdir={datadir}/mpsym/",
f"platform.symmetries_json=platforms/symmetries/{small_platform}.json",
"trace=tgff_reader",
],
cwd=datadir,
)
subprocess.check_call(
[
"mocasin",
"generate_mapping",
"graph=tgff_reader",
"tgff.directory=tgff/e3s-0.9",
"tgff.file=auto-indust-cords.tgff",
f"platform={small_platform}",
"representation=Symmetries",
"representation.disable_mpsym=true",
"mapper=genetic",
"mapper.random_seed=42",
f"outdir={datadir}/python/",
"trace=tgff_reader",
],
cwd=datadir,
)
assert filecmp.cmp(
os.path.join(datadir, "mpsym/best_time.txt"),
os.path.join(datadir, "python/best_time.txt"),
shallow=False,
)
| 28.71134
| 83
| 0.557271
| 279
| 2,785
| 5.419355
| 0.272401
| 0.05291
| 0.050265
| 0.068783
| 0.822751
| 0.814153
| 0.814153
| 0.814153
| 0.814153
| 0.724868
| 0
| 0.010471
| 0.314183
| 2,785
| 96
| 84
| 29.010417
| 0.781152
| 0.041652
| 0
| 0.771084
| 0
| 0
| 0.43506
| 0.28979
| 0
| 0
| 0
| 0
| 0.024096
| 1
| 0.024096
| false
| 0
| 0.048193
| 0
| 0.072289
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bd0b0d0f1610bf1b0024862ef6a4aba25e40f98d
| 201
|
py
|
Python
|
ulmo/usgs/ned/__init__.py
|
timcera/ulmo
|
f927a33e1366851aa0656656d0cb0d2068d29c78
|
[
"BSD-3-Clause"
] | 3
|
2017-09-17T21:27:48.000Z
|
2022-03-15T12:58:53.000Z
|
ulmo/usgs/ned/__init__.py
|
timcera/ulmo
|
f927a33e1366851aa0656656d0cb0d2068d29c78
|
[
"BSD-3-Clause"
] | null | null | null |
ulmo/usgs/ned/__init__.py
|
timcera/ulmo
|
f927a33e1366851aa0656656d0cb0d2068d29c78
|
[
"BSD-3-Clause"
] | 3
|
2021-02-23T06:26:00.000Z
|
2021-02-23T06:26:18.000Z
|
"""
`National Elevation Dataset`_ services (Raster)
.. _National Elevation Dataset: http://ned.usgs.gov
"""
from .core import get_available_layers, get_raster, get_raster_availability
| 22.333333
| 75
| 0.736318
| 24
| 201
| 5.875
| 0.666667
| 0.241135
| 0.340426
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.149254
| 201
| 8
| 76
| 25.125
| 0.824561
| 0.532338
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
1feea8d7fff7210b6f911ebb5c5b481a8b398c79
| 102
|
py
|
Python
|
cloeepy_redis/__init__.py
|
cloeeai/CloeePy-Redis
|
3d2e366016f27572802da509e14b3af88c86d789
|
[
"MIT"
] | null | null | null |
cloeepy_redis/__init__.py
|
cloeeai/CloeePy-Redis
|
3d2e366016f27572802da509e14b3af88c86d789
|
[
"MIT"
] | null | null | null |
cloeepy_redis/__init__.py
|
cloeeai/CloeePy-Redis
|
3d2e366016f27572802da509e14b3af88c86d789
|
[
"MIT"
] | null | null | null |
from cloeepy_redis.cloeepy_redis import CloeePyRedis
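# Returns the plugin class so a CloeePy application can load this plugin.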
def get_plugin_class():
return CloeePyRedis
| 20.4
| 52
| 0.833333
| 13
| 102
| 6.230769
| 0.769231
| 0.296296
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.127451
| 102
| 4
| 53
| 25.5
| 0.910112
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
1ff825769e0a75a55f627b64f5f51eb3159e6436
| 101,371
|
py
|
Python
|
nova/tests/unit/api/openstack/test_wsgi.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/unit/api/openstack/test_wsgi.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/unit/api/openstack/test_wsgi.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | 2
|
2017-07-20T17:31:34.000Z
|
2020-07-24T02:42:19.000Z
|
begin_unit
comment|'# Licensed under the Apache License, Version 2.0 (the "License"); you may'
nl|'\n'
comment|'# not use this file except in compliance with the License. You may obtain'
nl|'\n'
comment|'# a copy of the License at'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# http://www.apache.org/licenses/LICENSE-2.0'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Unless required by applicable law or agreed to in writing, software'
nl|'\n'
comment|'# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT'
nl|'\n'
comment|'# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the'
nl|'\n'
comment|'# License for the specific language governing permissions and limitations'
nl|'\n'
comment|'# under the License.'
nl|'\n'
nl|'\n'
name|'import'
name|'inspect'
newline|'\n'
nl|'\n'
name|'import'
name|'mock'
newline|'\n'
name|'import'
name|'six'
newline|'\n'
name|'import'
name|'webob'
newline|'\n'
nl|'\n'
name|'from'
name|'nova'
op|'.'
name|'api'
op|'.'
name|'openstack'
name|'import'
name|'api_version_request'
name|'as'
name|'api_version'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'api'
op|'.'
name|'openstack'
name|'import'
name|'extensions'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'api'
op|'.'
name|'openstack'
name|'import'
name|'versioned_method'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'api'
op|'.'
name|'openstack'
name|'import'
name|'wsgi'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'exception'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'i18n'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'test'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
op|'.'
name|'api'
op|'.'
name|'openstack'
name|'import'
name|'fakes'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
name|'import'
name|'matchers'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
name|'import'
name|'utils'
newline|'\n'
name|'from'
name|'oslo_serialization'
name|'import'
name|'jsonutils'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|RequestTest
name|'class'
name|'RequestTest'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
DECL|variable|header_name
indent|' '
name|'header_name'
op|'='
string|"'X-OpenStack-Nova-API-Version'"
newline|'\n'
nl|'\n'
DECL|member|test_content_type_missing
name|'def'
name|'test_content_type_missing'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'request'
op|'='
name|'wsgi'
op|'.'
name|'Request'
op|'.'
name|'blank'
op|'('
string|"'/tests/123'"
op|','
name|'method'
op|'='
string|"'POST'"
op|')'
newline|'\n'
name|'request'
op|'.'
name|'body'
op|'='
string|'b"<body />"'
newline|'\n'
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'request'
op|'.'
name|'get_content_type'
op|'('
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_content_type_unsupported
dedent|''
name|'def'
name|'test_content_type_unsupported'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'request'
op|'='
name|'wsgi'
op|'.'
name|'Request'
op|'.'
name|'blank'
op|'('
string|"'/tests/123'"
op|','
name|'method'
op|'='
string|"'POST'"
op|')'
newline|'\n'
name|'request'
op|'.'
name|'headers'
op|'['
string|'"Content-Type"'
op|']'
op|'='
string|'"text/html"'
newline|'\n'
name|'request'
op|'.'
name|'body'
op|'='
string|'b"asdf<br />"'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'InvalidContentType'
op|','
nl|'\n'
name|'request'
op|'.'
name|'get_content_type'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_content_type_with_charset
dedent|''
name|'def'
name|'test_content_type_with_charset'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'request'
op|'='
name|'wsgi'
op|'.'
name|'Request'
op|'.'
name|'blank'
op|'('
string|"'/tests/123'"
op|')'
newline|'\n'
name|'request'
op|'.'
name|'headers'
op|'['
string|'"Content-Type"'
op|']'
op|'='
string|'"application/json; charset=UTF-8"'
newline|'\n'
name|'result'
op|'='
name|'request'
op|'.'
name|'get_content_type'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'result'
op|','
string|'"application/json"'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_content_type_accept_default
dedent|''
name|'def'
name|'test_content_type_accept_default'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'request'
op|'='
name|'wsgi'
op|'.'
name|'Request'
op|'.'
name|'blank'
op|'('
string|"'/tests/123.unsupported'"
op|')'
newline|'\n'
name|'request'
op|'.'
name|'headers'
op|'['
string|'"Accept"'
op|']'
op|'='
string|'"application/unsupported1"'
newline|'\n'
name|'result'
op|'='
name|'request'
op|'.'
name|'best_match_content_type'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'result'
op|','
string|'"application/json"'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_cache_and_retrieve_instances
dedent|''
name|'def'
name|'test_cache_and_retrieve_instances'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'request'
op|'='
name|'wsgi'
op|'.'
name|'Request'
op|'.'
name|'blank'
op|'('
string|"'/foo'"
op|')'
newline|'\n'
name|'instances'
op|'='
op|'['
op|']'
newline|'\n'
name|'for'
name|'x'
name|'in'
name|'range'
op|'('
number|'3'
op|')'
op|':'
newline|'\n'
indent|' '
name|'instances'
op|'.'
name|'append'
op|'('
op|'{'
string|"'uuid'"
op|':'
string|"'uuid%s'"
op|'%'
name|'x'
op|'}'
op|')'
newline|'\n'
comment|'# Store 2'
nl|'\n'
dedent|''
name|'request'
op|'.'
name|'cache_db_instances'
op|'('
name|'instances'
op|'['
op|':'
number|'2'
op|']'
op|')'
newline|'\n'
comment|'# Store 1'
nl|'\n'
name|'request'
op|'.'
name|'cache_db_instance'
op|'('
name|'instances'
op|'['
number|'2'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'request'
op|'.'
name|'get_db_instance'
op|'('
string|"'uuid0'"
op|')'
op|','
nl|'\n'
name|'instances'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'request'
op|'.'
name|'get_db_instance'
op|'('
string|"'uuid1'"
op|')'
op|','
nl|'\n'
name|'instances'
op|'['
number|'1'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'request'
op|'.'
name|'get_db_instance'
op|'('
string|"'uuid2'"
op|')'
op|','
nl|'\n'
name|'instances'
op|'['
number|'2'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'request'
op|'.'
name|'get_db_instance'
op|'('
string|"'uuid3'"
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'request'
op|'.'
name|'get_db_instances'
op|'('
op|')'
op|','
nl|'\n'
op|'{'
string|"'uuid0'"
op|':'
name|'instances'
op|'['
number|'0'
op|']'
op|','
nl|'\n'
string|"'uuid1'"
op|':'
name|'instances'
op|'['
number|'1'
op|']'
op|','
nl|'\n'
string|"'uuid2'"
op|':'
name|'instances'
op|'['
number|'2'
op|']'
op|'}'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_cache_and_retrieve_compute_nodes
dedent|''
name|'def'
name|'test_cache_and_retrieve_compute_nodes'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'request'
op|'='
name|'wsgi'
op|'.'
name|'Request'
op|'.'
name|'blank'
op|'('
string|"'/foo'"
op|')'
newline|'\n'
name|'compute_nodes'
op|'='
op|'['
op|']'
newline|'\n'
name|'for'
name|'x'
name|'in'
name|'range'
op|'('
number|'3'
op|')'
op|':'
newline|'\n'
indent|' '
name|'compute_nodes'
op|'.'
name|'append'
op|'('
op|'{'
string|"'id'"
op|':'
string|"'id%s'"
op|'%'
name|'x'
op|'}'
op|')'
newline|'\n'
comment|'# Store 2'
nl|'\n'
dedent|''
name|'request'
op|'.'
name|'cache_db_compute_nodes'
op|'('
name|'compute_nodes'
op|'['
op|':'
number|'2'
op|']'
op|')'
newline|'\n'
comment|'# Store 1'
nl|'\n'
name|'request'
op|'.'
name|'cache_db_compute_node'
op|'('
name|'compute_nodes'
op|'['
number|'2'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'request'
op|'.'
name|'get_db_compute_node'
op|'('
string|"'id0'"
op|')'
op|','
nl|'\n'
name|'compute_nodes'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'request'
op|'.'
name|'get_db_compute_node'
op|'('
string|"'id1'"
op|')'
op|','
nl|'\n'
name|'compute_nodes'
op|'['
number|'1'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'request'
op|'.'
name|'get_db_compute_node'
op|'('
string|"'id2'"
op|')'
op|','
nl|'\n'
name|'compute_nodes'
op|'['
number|'2'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'request'
op|'.'
name|'get_db_compute_node'
op|'('
string|"'id3'"
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'request'
op|'.'
name|'get_db_compute_nodes'
op|'('
op|')'
op|','
nl|'\n'
op|'{'
string|"'id0'"
op|':'
name|'compute_nodes'
op|'['
number|'0'
op|']'
op|','
nl|'\n'
string|"'id1'"
op|':'
name|'compute_nodes'
op|'['
number|'1'
op|']'
op|','
nl|'\n'
string|"'id2'"
op|':'
name|'compute_nodes'
op|'['
number|'2'
op|']'
op|'}'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_from_request
dedent|''
name|'def'
name|'test_from_request'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'i18n'
op|','
string|"'get_available_languages'"
op|','
nl|'\n'
name|'fakes'
op|'.'
name|'fake_get_available_languages'
op|')'
newline|'\n'
nl|'\n'
name|'request'
op|'='
name|'wsgi'
op|'.'
name|'Request'
op|'.'
name|'blank'
op|'('
string|"'/'"
op|')'
newline|'\n'
name|'accepted'
op|'='
string|"'bogus;q=1.1, en-gb;q=0.7,en-us,en;q=.5,*;q=.7'"
newline|'\n'
name|'request'
op|'.'
name|'headers'
op|'='
op|'{'
string|"'Accept-Language'"
op|':'
name|'accepted'
op|'}'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'request'
op|'.'
name|'best_match_language'
op|'('
op|')'
op|','
string|"'en_US'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_asterisk
dedent|''
name|'def'
name|'test_asterisk'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# asterisk should match first available if there'
nl|'\n'
comment|'# are not any other available matches'
nl|'\n'
indent|' '
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'i18n'
op|','
string|"'get_available_languages'"
op|','
nl|'\n'
name|'fakes'
op|'.'
name|'fake_get_available_languages'
op|')'
newline|'\n'
nl|'\n'
name|'request'
op|'='
name|'wsgi'
op|'.'
name|'Request'
op|'.'
name|'blank'
op|'('
string|"'/'"
op|')'
newline|'\n'
name|'accepted'
op|'='
string|"'*,es;q=.5'"
newline|'\n'
name|'request'
op|'.'
name|'headers'
op|'='
op|'{'
string|"'Accept-Language'"
op|':'
name|'accepted'
op|'}'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'request'
op|'.'
name|'best_match_language'
op|'('
op|')'
op|','
string|"'en_GB'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_prefix
dedent|''
name|'def'
name|'test_prefix'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'i18n'
op|','
string|"'get_available_languages'"
op|','
nl|'\n'
name|'fakes'
op|'.'
name|'fake_get_available_languages'
op|')'
newline|'\n'
nl|'\n'
name|'request'
op|'='
name|'wsgi'
op|'.'
name|'Request'
op|'.'
name|'blank'
op|'('
string|"'/'"
op|')'
newline|'\n'
name|'accepted'
op|'='
string|"'zh'"
newline|'\n'
name|'request'
op|'.'
name|'headers'
op|'='
op|'{'
string|"'Accept-Language'"
op|':'
name|'accepted'
op|'}'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'request'
op|'.'
name|'best_match_language'
op|'('
op|')'
op|','
string|"'zh_CN'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_secondary
dedent|''
name|'def'
name|'test_secondary'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'i18n'
op|','
string|"'get_available_languages'"
op|','
nl|'\n'
name|'fakes'
op|'.'
name|'fake_get_available_languages'
op|')'
newline|'\n'
nl|'\n'
name|'request'
op|'='
name|'wsgi'
op|'.'
name|'Request'
op|'.'
name|'blank'
op|'('
string|"'/'"
op|')'
newline|'\n'
name|'accepted'
op|'='
string|"'nn,en-gb;q=.5'"
newline|'\n'
name|'request'
op|'.'
name|'headers'
op|'='
op|'{'
string|"'Accept-Language'"
op|':'
name|'accepted'
op|'}'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'request'
op|'.'
name|'best_match_language'
op|'('
op|')'
op|','
string|"'en_GB'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_none_found
dedent|''
name|'def'
name|'test_none_found'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'i18n'
op|','
string|"'get_available_languages'"
op|','
nl|'\n'
name|'fakes'
op|'.'
name|'fake_get_available_languages'
op|')'
newline|'\n'
nl|'\n'
name|'request'
op|'='
name|'wsgi'
op|'.'
name|'Request'
op|'.'
name|'blank'
op|'('
string|"'/'"
op|')'
newline|'\n'
name|'accepted'
op|'='
string|"'nb-no'"
newline|'\n'
name|'request'
op|'.'
name|'headers'
op|'='
op|'{'
string|"'Accept-Language'"
op|':'
name|'accepted'
op|'}'
newline|'\n'
name|'self'
op|'.'
name|'assertIs'
op|'('
name|'request'
op|'.'
name|'best_match_language'
op|'('
op|')'
op|','
name|'None'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_no_lang_header
dedent|''
name|'def'
name|'test_no_lang_header'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'i18n'
op|','
string|"'get_available_languages'"
op|','
nl|'\n'
name|'fakes'
op|'.'
name|'fake_get_available_languages'
op|')'
newline|'\n'
nl|'\n'
name|'request'
op|'='
name|'wsgi'
op|'.'
name|'Request'
op|'.'
name|'blank'
op|'('
string|"'/'"
op|')'
newline|'\n'
name|'accepted'
op|'='
string|"''"
newline|'\n'
name|'request'
op|'.'
name|'headers'
op|'='
op|'{'
string|"'Accept-Language'"
op|':'
name|'accepted'
op|'}'
newline|'\n'
name|'self'
op|'.'
name|'assertIs'
op|'('
name|'request'
op|'.'
name|'best_match_language'
op|'('
op|')'
op|','
name|'None'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_api_version_request_header_none
dedent|''
name|'def'
name|'test_api_version_request_header_none'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'request'
op|'='
name|'wsgi'
op|'.'
name|'Request'
op|'.'
name|'blank'
op|'('
string|"'/'"
op|')'
newline|'\n'
name|'request'
op|'.'
name|'set_api_version_request'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'api_version'
op|'.'
name|'APIVersionRequest'
op|'('
nl|'\n'
name|'api_version'
op|'.'
name|'DEFAULT_API_VERSION'
op|')'
op|','
name|'request'
op|'.'
name|'api_version_request'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|'"nova.api.openstack.api_version_request.max_api_version"'
op|')'
newline|'\n'
DECL|member|test_api_version_request_header
name|'def'
name|'test_api_version_request_header'
op|'('
name|'self'
op|','
name|'mock_maxver'
op|')'
op|':'
newline|'\n'
indent|' '
name|'mock_maxver'
op|'.'
name|'return_value'
op|'='
name|'api_version'
op|'.'
name|'APIVersionRequest'
op|'('
string|'"2.14"'
op|')'
newline|'\n'
nl|'\n'
name|'request'
op|'='
name|'wsgi'
op|'.'
name|'Request'
op|'.'
name|'blank'
op|'('
string|"'/'"
op|')'
newline|'\n'
name|'request'
op|'.'
name|'headers'
op|'='
op|'{'
name|'self'
op|'.'
name|'header_name'
op|':'
string|"'2.14'"
op|'}'
newline|'\n'
name|'request'
op|'.'
name|'set_api_version_request'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'api_version'
op|'.'
name|'APIVersionRequest'
op|'('
string|'"2.14"'
op|')'
op|','
nl|'\n'
name|'request'
op|'.'
name|'api_version_request'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|'"nova.api.openstack.api_version_request.max_api_version"'
op|')'
newline|'\n'
DECL|member|test_api_version_request_header_latest
name|'def'
name|'test_api_version_request_header_latest'
op|'('
name|'self'
op|','
name|'mock_maxver'
op|')'
op|':'
newline|'\n'
indent|' '
name|'mock_maxver'
op|'.'
name|'return_value'
op|'='
name|'api_version'
op|'.'
name|'APIVersionRequest'
op|'('
string|'"3.5"'
op|')'
newline|'\n'
nl|'\n'
name|'request'
op|'='
name|'wsgi'
op|'.'
name|'Request'
op|'.'
name|'blank'
op|'('
string|"'/'"
op|')'
newline|'\n'
name|'request'
op|'.'
name|'headers'
op|'='
op|'{'
name|'self'
op|'.'
name|'header_name'
op|':'
string|"'latest'"
op|'}'
newline|'\n'
name|'request'
op|'.'
name|'set_api_version_request'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'api_version'
op|'.'
name|'APIVersionRequest'
op|'('
string|'"3.5"'
op|')'
op|','
nl|'\n'
name|'request'
op|'.'
name|'api_version_request'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_api_version_request_header_invalid
dedent|''
name|'def'
name|'test_api_version_request_header_invalid'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'request'
op|'='
name|'wsgi'
op|'.'
name|'Request'
op|'.'
name|'blank'
op|'('
string|"'/'"
op|')'
newline|'\n'
name|'request'
op|'.'
name|'headers'
op|'='
op|'{'
name|'self'
op|'.'
name|'header_name'
op|':'
string|"'2.1.3'"
op|'}'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'InvalidAPIVersionString'
op|','
nl|'\n'
name|'request'
op|'.'
name|'set_api_version_request'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|ActionDispatcherTest
dedent|''
dedent|''
name|'class'
name|'ActionDispatcherTest'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
DECL|member|test_dispatch
indent|' '
name|'def'
name|'test_dispatch'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'serializer'
op|'='
name|'wsgi'
op|'.'
name|'ActionDispatcher'
op|'('
op|')'
newline|'\n'
name|'serializer'
op|'.'
name|'create'
op|'='
name|'lambda'
name|'x'
op|':'
string|"'pants'"
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'serializer'
op|'.'
name|'dispatch'
op|'('
op|'{'
op|'}'
op|','
name|'action'
op|'='
string|"'create'"
op|')'
op|','
string|"'pants'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_dispatch_action_None
dedent|''
name|'def'
name|'test_dispatch_action_None'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'serializer'
op|'='
name|'wsgi'
op|'.'
name|'ActionDispatcher'
op|'('
op|')'
newline|'\n'
name|'serializer'
op|'.'
name|'create'
op|'='
name|'lambda'
name|'x'
op|':'
string|"'pants'"
newline|'\n'
name|'serializer'
op|'.'
name|'default'
op|'='
name|'lambda'
name|'x'
op|':'
string|"'trousers'"
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'serializer'
op|'.'
name|'dispatch'
op|'('
op|'{'
op|'}'
op|','
name|'action'
op|'='
name|'None'
op|')'
op|','
string|"'trousers'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_dispatch_default
dedent|''
name|'def'
name|'test_dispatch_default'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'serializer'
op|'='
name|'wsgi'
op|'.'
name|'ActionDispatcher'
op|'('
op|')'
newline|'\n'
name|'serializer'
op|'.'
name|'create'
op|'='
name|'lambda'
name|'x'
op|':'
string|"'pants'"
newline|'\n'
name|'serializer'
op|'.'
name|'default'
op|'='
name|'lambda'
name|'x'
op|':'
string|"'trousers'"
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'serializer'
op|'.'
name|'dispatch'
op|'('
op|'{'
op|'}'
op|','
name|'action'
op|'='
string|"'update'"
op|')'
op|','
string|"'trousers'"
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|JSONDictSerializerTest
dedent|''
dedent|''
name|'class'
name|'JSONDictSerializerTest'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
DECL|member|test_json
indent|' '
name|'def'
name|'test_json'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'input_dict'
op|'='
name|'dict'
op|'('
name|'servers'
op|'='
name|'dict'
op|'('
name|'a'
op|'='
op|'('
number|'2'
op|','
number|'3'
op|')'
op|')'
op|')'
newline|'\n'
name|'expected_json'
op|'='
string|'\'{"servers":{"a":[2,3]}}\''
newline|'\n'
name|'serializer'
op|'='
name|'wsgi'
op|'.'
name|'JSONDictSerializer'
op|'('
op|')'
newline|'\n'
name|'result'
op|'='
name|'serializer'
op|'.'
name|'serialize'
op|'('
name|'input_dict'
op|')'
newline|'\n'
name|'result'
op|'='
name|'result'
op|'.'
name|'replace'
op|'('
string|"'\\n'"
op|','
string|"''"
op|')'
op|'.'
name|'replace'
op|'('
string|"' '"
op|','
string|"''"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'result'
op|','
name|'expected_json'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|JSONDeserializerTest
dedent|''
dedent|''
name|'class'
name|'JSONDeserializerTest'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
DECL|member|test_json
indent|' '
name|'def'
name|'test_json'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'data'
op|'='
string|'"""{"a": {\n "a1": "1",\n "a2": "2",\n "bs": ["1", "2", "3", {"c": {"c1": "1"}}],\n "d": {"e": "1"},\n "f": "1"}}"""'
newline|'\n'
name|'as_dict'
op|'='
op|'{'
nl|'\n'
string|"'body'"
op|':'
op|'{'
nl|'\n'
string|"'a'"
op|':'
op|'{'
nl|'\n'
string|"'a1'"
op|':'
string|"'1'"
op|','
nl|'\n'
string|"'a2'"
op|':'
string|"'2'"
op|','
nl|'\n'
string|"'bs'"
op|':'
op|'['
string|"'1'"
op|','
string|"'2'"
op|','
string|"'3'"
op|','
op|'{'
string|"'c'"
op|':'
op|'{'
string|"'c1'"
op|':'
string|"'1'"
op|'}'
op|'}'
op|']'
op|','
nl|'\n'
string|"'d'"
op|':'
op|'{'
string|"'e'"
op|':'
string|"'1'"
op|'}'
op|','
nl|'\n'
string|"'f'"
op|':'
string|"'1'"
op|','
nl|'\n'
op|'}'
op|','
nl|'\n'
op|'}'
op|','
nl|'\n'
op|'}'
newline|'\n'
name|'deserializer'
op|'='
name|'wsgi'
op|'.'
name|'JSONDeserializer'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'deserializer'
op|'.'
name|'deserialize'
op|'('
name|'data'
op|')'
op|','
name|'as_dict'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_json_valid_utf8
dedent|''
name|'def'
name|'test_json_valid_utf8'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'data'
op|'='
string|'b"""{"server": {"min_count": 1, "flavorRef": "1",\n "name": "\\xe6\\xa6\\x82\\xe5\\xbf\\xb5",\n "imageRef": "10bab10c-1304-47d",\n "max_count": 1}} """'
newline|'\n'
name|'as_dict'
op|'='
op|'{'
nl|'\n'
string|"'body'"
op|':'
op|'{'
nl|'\n'
string|"u'server'"
op|':'
op|'{'
nl|'\n'
string|"u'min_count'"
op|':'
number|'1'
op|','
string|"u'flavorRef'"
op|':'
string|"u'1'"
op|','
nl|'\n'
string|"u'name'"
op|':'
string|"u'\\u6982\\u5ff5'"
op|','
nl|'\n'
string|"u'imageRef'"
op|':'
string|"u'10bab10c-1304-47d'"
op|','
nl|'\n'
string|"u'max_count'"
op|':'
number|'1'
nl|'\n'
op|'}'
nl|'\n'
op|'}'
nl|'\n'
op|'}'
newline|'\n'
name|'deserializer'
op|'='
name|'wsgi'
op|'.'
name|'JSONDeserializer'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'deserializer'
op|'.'
name|'deserialize'
op|'('
name|'data'
op|')'
op|','
name|'as_dict'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_json_invalid_utf8
dedent|''
name|'def'
name|'test_json_invalid_utf8'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Send invalid utf-8 to JSONDeserializer."""'
newline|'\n'
name|'data'
op|'='
string|'b"""{"server": {"min_count": 1, "flavorRef": "1",\n "name": "\\xf0\\x28\\x8c\\x28",\n "imageRef": "10bab10c-1304-47d",\n "max_count": 1}} """'
newline|'\n'
nl|'\n'
name|'deserializer'
op|'='
name|'wsgi'
op|'.'
name|'JSONDeserializer'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'MalformedRequestBody'
op|','
nl|'\n'
name|'deserializer'
op|'.'
name|'deserialize'
op|','
name|'data'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|ResourceTest
dedent|''
dedent|''
name|'class'
name|'ResourceTest'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
DECL|variable|header_name
indent|' '
name|'header_name'
op|'='
string|"'X-OpenStack-Nova-API-Version'"
newline|'\n'
nl|'\n'
DECL|member|get_req_id_header_name
name|'def'
name|'get_req_id_header_name'
op|'('
name|'self'
op|','
name|'request'
op|')'
op|':'
newline|'\n'
indent|' '
name|'header_name'
op|'='
string|"'x-openstack-request-id'"
newline|'\n'
name|'if'
name|'utils'
op|'.'
name|'get_api_version'
op|'('
name|'request'
op|')'
op|'<'
number|'3'
op|':'
newline|'\n'
indent|' '
name|'header_name'
op|'='
string|"'x-compute-request-id'"
newline|'\n'
nl|'\n'
dedent|''
name|'return'
name|'header_name'
newline|'\n'
nl|'\n'
DECL|member|test_resource_receives_api_version_request_default
dedent|''
name|'def'
name|'test_resource_receives_api_version_request_default'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|Controller
indent|' '
name|'class'
name|'Controller'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
DECL|member|index
indent|' '
name|'def'
name|'index'
op|'('
name|'self'
op|','
name|'req'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'req'
op|'.'
name|'api_version_request'
op|'!='
name|'api_version'
op|'.'
name|'APIVersionRequest'
op|'('
nl|'\n'
name|'api_version'
op|'.'
name|'DEFAULT_API_VERSION'
op|')'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'webob'
op|'.'
name|'exc'
op|'.'
name|'HTTPInternalServerError'
op|'('
op|')'
newline|'\n'
dedent|''
name|'return'
string|"'success'"
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'app'
op|'='
name|'fakes'
op|'.'
name|'TestRouterV21'
op|'('
name|'Controller'
op|'('
op|')'
op|')'
newline|'\n'
name|'req'
op|'='
name|'webob'
op|'.'
name|'Request'
op|'.'
name|'blank'
op|'('
string|"'/tests'"
op|')'
newline|'\n'
name|'response'
op|'='
name|'req'
op|'.'
name|'get_response'
op|'('
name|'app'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"b'success'"
op|','
name|'response'
op|'.'
name|'body'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'response'
op|'.'
name|'status_int'
op|','
number|'200'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|'"nova.api.openstack.api_version_request.max_api_version"'
op|')'
newline|'\n'
DECL|member|test_resource_receives_api_version_request
name|'def'
name|'test_resource_receives_api_version_request'
op|'('
name|'self'
op|','
name|'mock_maxver'
op|')'
op|':'
newline|'\n'
indent|' '
name|'version'
op|'='
string|'"2.5"'
newline|'\n'
name|'mock_maxver'
op|'.'
name|'return_value'
op|'='
name|'api_version'
op|'.'
name|'APIVersionRequest'
op|'('
name|'version'
op|')'
newline|'\n'
nl|'\n'
DECL|class|Controller
name|'class'
name|'Controller'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
DECL|member|index
indent|' '
name|'def'
name|'index'
op|'('
name|'self'
op|','
name|'req'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'req'
op|'.'
name|'api_version_request'
op|'!='
name|'api_version'
op|'.'
name|'APIVersionRequest'
op|'('
name|'version'
op|')'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'webob'
op|'.'
name|'exc'
op|'.'
name|'HTTPInternalServerError'
op|'('
op|')'
newline|'\n'
dedent|''
name|'return'
string|"'success'"
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'app'
op|'='
name|'fakes'
op|'.'
name|'TestRouterV21'
op|'('
name|'Controller'
op|'('
op|')'
op|')'
newline|'\n'
name|'req'
op|'='
name|'webob'
op|'.'
name|'Request'
op|'.'
name|'blank'
op|'('
string|"'/tests'"
op|')'
newline|'\n'
name|'req'
op|'.'
name|'headers'
op|'='
op|'{'
name|'self'
op|'.'
name|'header_name'
op|':'
name|'version'
op|'}'
newline|'\n'
name|'response'
op|'='
name|'req'
op|'.'
name|'get_response'
op|'('
name|'app'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"b'success'"
op|','
name|'response'
op|'.'
name|'body'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'response'
op|'.'
name|'status_int'
op|','
number|'200'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_resource_receives_api_version_request_invalid
dedent|''
name|'def'
name|'test_resource_receives_api_version_request_invalid'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'invalid_version'
op|'='
string|'"2.5.3"'
newline|'\n'
nl|'\n'
DECL|class|Controller
name|'class'
name|'Controller'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
DECL|member|index
indent|' '
name|'def'
name|'index'
op|'('
name|'self'
op|','
name|'req'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
string|"'success'"
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'app'
op|'='
name|'fakes'
op|'.'
name|'TestRouterV21'
op|'('
name|'Controller'
op|'('
op|')'
op|')'
newline|'\n'
name|'req'
op|'='
name|'webob'
op|'.'
name|'Request'
op|'.'
name|'blank'
op|'('
string|"'/tests'"
op|')'
newline|'\n'
name|'req'
op|'.'
name|'headers'
op|'='
op|'{'
name|'self'
op|'.'
name|'header_name'
op|':'
name|'invalid_version'
op|'}'
newline|'\n'
name|'response'
op|'='
name|'req'
op|'.'
name|'get_response'
op|'('
name|'app'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'400'
op|','
name|'response'
op|'.'
name|'status_int'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_resource_call_with_method_get
dedent|''
name|'def'
name|'test_resource_call_with_method_get'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|Controller
indent|' '
name|'class'
name|'Controller'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
DECL|member|index
indent|' '
name|'def'
name|'index'
op|'('
name|'self'
op|','
name|'req'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
string|"'success'"
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'app'
op|'='
name|'fakes'
op|'.'
name|'TestRouter'
op|'('
name|'Controller'
op|'('
op|')'
op|')'
newline|'\n'
comment|'# the default method is GET'
nl|'\n'
name|'req'
op|'='
name|'webob'
op|'.'
name|'Request'
op|'.'
name|'blank'
op|'('
string|"'/tests'"
op|')'
newline|'\n'
name|'response'
op|'='
name|'req'
op|'.'
name|'get_response'
op|'('
name|'app'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"b'success'"
op|','
name|'response'
op|'.'
name|'body'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'response'
op|'.'
name|'status_int'
op|','
number|'200'
op|')'
newline|'\n'
name|'req'
op|'.'
name|'body'
op|'='
string|'b\'{"body": {"key": "value"}}\''
newline|'\n'
name|'response'
op|'='
name|'req'
op|'.'
name|'get_response'
op|'('
name|'app'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"b'success'"
op|','
name|'response'
op|'.'
name|'body'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'response'
op|'.'
name|'status_int'
op|','
number|'200'
op|')'
newline|'\n'
name|'req'
op|'.'
name|'content_type'
op|'='
string|"'application/json'"
newline|'\n'
name|'response'
op|'='
name|'req'
op|'.'
name|'get_response'
op|'('
name|'app'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"b'success'"
op|','
name|'response'
op|'.'
name|'body'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'response'
op|'.'
name|'status_int'
op|','
number|'200'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_resource_call_with_method_post
dedent|''
name|'def'
name|'test_resource_call_with_method_post'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|Controller
indent|' '
name|'class'
name|'Controller'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
indent|' '
op|'@'
name|'extensions'
op|'.'
name|'expected_errors'
op|'('
number|'400'
op|')'
newline|'\n'
DECL|member|create
name|'def'
name|'create'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'body'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'expected_body'
op|'!='
name|'body'
op|':'
newline|'\n'
indent|' '
name|'msg'
op|'='
string|'"The request body invalid"'
newline|'\n'
name|'raise'
name|'webob'
op|'.'
name|'exc'
op|'.'
name|'HTTPBadRequest'
op|'('
name|'explanation'
op|'='
name|'msg'
op|')'
newline|'\n'
dedent|''
name|'return'
string|'"success"'
newline|'\n'
comment|'# verify the method: POST'
nl|'\n'
dedent|''
dedent|''
name|'app'
op|'='
name|'fakes'
op|'.'
name|'TestRouter'
op|'('
name|'Controller'
op|'('
op|')'
op|')'
newline|'\n'
name|'req'
op|'='
name|'webob'
op|'.'
name|'Request'
op|'.'
name|'blank'
op|'('
string|"'/tests'"
op|','
name|'method'
op|'='
string|'"POST"'
op|','
nl|'\n'
name|'content_type'
op|'='
string|"'application/json'"
op|')'
newline|'\n'
name|'req'
op|'.'
name|'body'
op|'='
string|'b\'{"body": {"key": "value"}}\''
newline|'\n'
name|'expected_body'
op|'='
op|'{'
string|"'body'"
op|':'
op|'{'
nl|'\n'
string|'"key"'
op|':'
string|'"value"'
nl|'\n'
op|'}'
nl|'\n'
op|'}'
newline|'\n'
name|'response'
op|'='
name|'req'
op|'.'
name|'get_response'
op|'('
name|'app'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'response'
op|'.'
name|'status_int'
op|','
number|'200'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"b'success'"
op|','
name|'response'
op|'.'
name|'body'
op|')'
newline|'\n'
comment|'# verify without body'
nl|'\n'
name|'expected_body'
op|'='
name|'None'
newline|'\n'
name|'req'
op|'.'
name|'body'
op|'='
name|'None'
newline|'\n'
name|'response'
op|'='
name|'req'
op|'.'
name|'get_response'
op|'('
name|'app'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'response'
op|'.'
name|'status_int'
op|','
number|'200'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"b'success'"
op|','
name|'response'
op|'.'
name|'body'
op|')'
newline|'\n'
comment|'# the body is validated in the controller'
nl|'\n'
name|'expected_body'
op|'='
op|'{'
string|"'body'"
op|':'
name|'None'
op|'}'
newline|'\n'
name|'response'
op|'='
name|'req'
op|'.'
name|'get_response'
op|'('
name|'app'
op|')'
newline|'\n'
name|'expected_unsupported_type_body'
op|'='
op|'{'
string|"'badRequest'"
op|':'
nl|'\n'
op|'{'
string|"'message'"
op|':'
string|"'The request body invalid'"
op|','
string|"'code'"
op|':'
number|'400'
op|'}'
op|'}'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'response'
op|'.'
name|'status_int'
op|','
number|'400'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected_unsupported_type_body'
op|','
nl|'\n'
name|'jsonutils'
op|'.'
name|'loads'
op|'('
name|'response'
op|'.'
name|'body'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_resource_call_with_method_put
dedent|''
name|'def'
name|'test_resource_call_with_method_put'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|Controller
indent|' '
name|'class'
name|'Controller'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
DECL|member|update
indent|' '
name|'def'
name|'update'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'id'
op|','
name|'body'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'expected_body'
op|'!='
name|'body'
op|':'
newline|'\n'
indent|' '
name|'msg'
op|'='
string|'"The request body invalid"'
newline|'\n'
name|'raise'
name|'webob'
op|'.'
name|'exc'
op|'.'
name|'HTTPBadRequest'
op|'('
name|'explanation'
op|'='
name|'msg'
op|')'
newline|'\n'
dedent|''
name|'return'
string|'"success"'
newline|'\n'
comment|'# verify the method: PUT'
nl|'\n'
dedent|''
dedent|''
name|'app'
op|'='
name|'fakes'
op|'.'
name|'TestRouter'
op|'('
name|'Controller'
op|'('
op|')'
op|')'
newline|'\n'
name|'req'
op|'='
name|'webob'
op|'.'
name|'Request'
op|'.'
name|'blank'
op|'('
string|"'/tests/test_id'"
op|','
name|'method'
op|'='
string|'"PUT"'
op|','
nl|'\n'
name|'content_type'
op|'='
string|"'application/json'"
op|')'
newline|'\n'
name|'req'
op|'.'
name|'body'
op|'='
string|'b\'{"body": {"key": "value"}}\''
newline|'\n'
name|'expected_body'
op|'='
op|'{'
string|"'body'"
op|':'
op|'{'
nl|'\n'
string|'"key"'
op|':'
string|'"value"'
nl|'\n'
op|'}'
nl|'\n'
op|'}'
newline|'\n'
name|'response'
op|'='
name|'req'
op|'.'
name|'get_response'
op|'('
name|'app'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"b'success'"
op|','
name|'response'
op|'.'
name|'body'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'response'
op|'.'
name|'status_int'
op|','
number|'200'
op|')'
newline|'\n'
name|'req'
op|'.'
name|'body'
op|'='
name|'None'
newline|'\n'
name|'expected_body'
op|'='
name|'None'
newline|'\n'
name|'response'
op|'='
name|'req'
op|'.'
name|'get_response'
op|'('
name|'app'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'response'
op|'.'
name|'status_int'
op|','
number|'200'
op|')'
newline|'\n'
comment|'# verify no content_type is contained in the request'
nl|'\n'
name|'req'
op|'='
name|'webob'
op|'.'
name|'Request'
op|'.'
name|'blank'
op|'('
string|"'/tests/test_id'"
op|','
name|'method'
op|'='
string|'"PUT"'
op|','
nl|'\n'
name|'content_type'
op|'='
string|"'application/xml'"
op|')'
newline|'\n'
name|'req'
op|'.'
name|'content_type'
op|'='
string|"'application/xml'"
newline|'\n'
name|'req'
op|'.'
name|'body'
op|'='
string|'b\'{"body": {"key": "value"}}\''
newline|'\n'
name|'response'
op|'='
name|'req'
op|'.'
name|'get_response'
op|'('
name|'app'
op|')'
newline|'\n'
name|'expected_unsupported_type_body'
op|'='
op|'{'
string|"'badMediaType'"
op|':'
nl|'\n'
op|'{'
string|"'message'"
op|':'
string|"'Unsupported Content-Type'"
op|','
string|"'code'"
op|':'
number|'415'
op|'}'
op|'}'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'response'
op|'.'
name|'status_int'
op|','
number|'415'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected_unsupported_type_body'
op|','
nl|'\n'
name|'jsonutils'
op|'.'
name|'loads'
op|'('
name|'response'
op|'.'
name|'body'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_resource_call_with_method_delete
dedent|''
name|'def'
name|'test_resource_call_with_method_delete'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|Controller
indent|' '
name|'class'
name|'Controller'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
DECL|member|delete
indent|' '
name|'def'
name|'delete'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'id'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
string|'"success"'
newline|'\n'
nl|'\n'
comment|'# verify the method: DELETE'
nl|'\n'
dedent|''
dedent|''
name|'app'
op|'='
name|'fakes'
op|'.'
name|'TestRouter'
op|'('
name|'Controller'
op|'('
op|')'
op|')'
newline|'\n'
name|'req'
op|'='
name|'webob'
op|'.'
name|'Request'
op|'.'
name|'blank'
op|'('
string|"'/tests/test_id'"
op|','
name|'method'
op|'='
string|'"DELETE"'
op|')'
newline|'\n'
name|'response'
op|'='
name|'req'
op|'.'
name|'get_response'
op|'('
name|'app'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'response'
op|'.'
name|'status_int'
op|','
number|'200'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"b'success'"
op|','
name|'response'
op|'.'
name|'body'
op|')'
newline|'\n'
comment|'# ignore the body'
nl|'\n'
name|'req'
op|'.'
name|'body'
op|'='
string|'b\'{"body": {"key": "value"}}\''
newline|'\n'
name|'response'
op|'='
name|'req'
op|'.'
name|'get_response'
op|'('
name|'app'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'response'
op|'.'
name|'status_int'
op|','
number|'200'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"b'success'"
op|','
name|'response'
op|'.'
name|'body'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_resource_not_authorized
dedent|''
name|'def'
name|'test_resource_not_authorized'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|Controller
indent|' '
name|'class'
name|'Controller'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
DECL|member|index
indent|' '
name|'def'
name|'index'
op|'('
name|'self'
op|','
name|'req'
op|')'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'exception'
op|'.'
name|'Forbidden'
op|'('
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'req'
op|'='
name|'webob'
op|'.'
name|'Request'
op|'.'
name|'blank'
op|'('
string|"'/tests'"
op|')'
newline|'\n'
name|'app'
op|'='
name|'fakes'
op|'.'
name|'TestRouter'
op|'('
name|'Controller'
op|'('
op|')'
op|')'
newline|'\n'
name|'response'
op|'='
name|'req'
op|'.'
name|'get_response'
op|'('
name|'app'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'response'
op|'.'
name|'status_int'
op|','
number|'403'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_dispatch
dedent|''
name|'def'
name|'test_dispatch'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|Controller
indent|' '
name|'class'
name|'Controller'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
DECL|member|index
indent|' '
name|'def'
name|'index'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'pants'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'pants'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'controller'
op|'='
name|'Controller'
op|'('
op|')'
newline|'\n'
name|'resource'
op|'='
name|'wsgi'
op|'.'
name|'Resource'
op|'('
name|'controller'
op|')'
newline|'\n'
name|'method'
op|','
name|'extensions'
op|'='
name|'resource'
op|'.'
name|'get_method'
op|'('
name|'None'
op|','
string|"'index'"
op|','
name|'None'
op|','
string|"''"
op|')'
newline|'\n'
name|'actual'
op|'='
name|'resource'
op|'.'
name|'dispatch'
op|'('
name|'method'
op|','
name|'None'
op|','
op|'{'
string|"'pants'"
op|':'
string|"'off'"
op|'}'
op|')'
newline|'\n'
name|'expected'
op|'='
string|"'off'"
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'actual'
op|','
name|'expected'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_method_unknown_controller_method
dedent|''
name|'def'
name|'test_get_method_unknown_controller_method'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|Controller
indent|' '
name|'class'
name|'Controller'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
DECL|member|index
indent|' '
name|'def'
name|'index'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'pants'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'pants'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'controller'
op|'='
name|'Controller'
op|'('
op|')'
newline|'\n'
name|'resource'
op|'='
name|'wsgi'
op|'.'
name|'Resource'
op|'('
name|'controller'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'AttributeError'
op|','
name|'resource'
op|'.'
name|'get_method'
op|','
nl|'\n'
name|'None'
op|','
string|"'create'"
op|','
name|'None'
op|','
string|"''"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_method_action_json
dedent|''
name|'def'
name|'test_get_method_action_json'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|Controller
indent|' '
name|'class'
name|'Controller'
op|'('
name|'wsgi'
op|'.'
name|'Controller'
op|')'
op|':'
newline|'\n'
indent|' '
op|'@'
name|'wsgi'
op|'.'
name|'action'
op|'('
string|"'fooAction'"
op|')'
newline|'\n'
DECL|member|_action_foo
name|'def'
name|'_action_foo'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'id'
op|','
name|'body'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'body'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'controller'
op|'='
name|'Controller'
op|'('
op|')'
newline|'\n'
name|'resource'
op|'='
name|'wsgi'
op|'.'
name|'Resource'
op|'('
name|'controller'
op|')'
newline|'\n'
name|'method'
op|','
name|'extensions'
op|'='
name|'resource'
op|'.'
name|'get_method'
op|'('
name|'None'
op|','
string|"'action'"
op|','
nl|'\n'
string|"'application/json'"
op|','
nl|'\n'
string|'\'{"fooAction": true}\''
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'controller'
op|'.'
name|'_action_foo'
op|','
name|'method'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_method_action_bad_body
dedent|''
name|'def'
name|'test_get_method_action_bad_body'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|Controller
indent|' '
name|'class'
name|'Controller'
op|'('
name|'wsgi'
op|'.'
name|'Controller'
op|')'
op|':'
newline|'\n'
indent|' '
op|'@'
name|'wsgi'
op|'.'
name|'action'
op|'('
string|"'fooAction'"
op|')'
newline|'\n'
DECL|member|_action_foo
name|'def'
name|'_action_foo'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'id'
op|','
name|'body'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'body'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'controller'
op|'='
name|'Controller'
op|'('
op|')'
newline|'\n'
name|'resource'
op|'='
name|'wsgi'
op|'.'
name|'Resource'
op|'('
name|'controller'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'MalformedRequestBody'
op|','
name|'resource'
op|'.'
name|'get_method'
op|','
nl|'\n'
name|'None'
op|','
string|"'action'"
op|','
string|"'application/json'"
op|','
string|"'{}'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_method_unknown_controller_action
dedent|''
name|'def'
name|'test_get_method_unknown_controller_action'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|Controller
indent|' '
name|'class'
name|'Controller'
op|'('
name|'wsgi'
op|'.'
name|'Controller'
op|')'
op|':'
newline|'\n'
indent|' '
op|'@'
name|'wsgi'
op|'.'
name|'action'
op|'('
string|"'fooAction'"
op|')'
newline|'\n'
DECL|member|_action_foo
name|'def'
name|'_action_foo'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'id'
op|','
name|'body'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'body'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'controller'
op|'='
name|'Controller'
op|'('
op|')'
newline|'\n'
name|'resource'
op|'='
name|'wsgi'
op|'.'
name|'Resource'
op|'('
name|'controller'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'KeyError'
op|','
name|'resource'
op|'.'
name|'get_method'
op|','
nl|'\n'
name|'None'
op|','
string|"'action'"
op|','
string|"'application/json'"
op|','
nl|'\n'
string|'\'{"barAction": true}\''
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_method_action_method
dedent|''
name|'def'
name|'test_get_method_action_method'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|Controller
indent|' '
name|'class'
name|'Controller'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
DECL|member|action
indent|' '
name|'def'
name|'action'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'pants'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'pants'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'controller'
op|'='
name|'Controller'
op|'('
op|')'
newline|'\n'
name|'resource'
op|'='
name|'wsgi'
op|'.'
name|'Resource'
op|'('
name|'controller'
op|')'
newline|'\n'
name|'method'
op|','
name|'extensions'
op|'='
name|'resource'
op|'.'
name|'get_method'
op|'('
name|'None'
op|','
string|"'action'"
op|','
nl|'\n'
string|"'application/xml'"
op|','
nl|'\n'
string|"'<fooAction>true</fooAction'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'controller'
op|'.'
name|'action'
op|','
name|'method'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_action_args
dedent|''
name|'def'
name|'test_get_action_args'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|Controller
indent|' '
name|'class'
name|'Controller'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
DECL|member|index
indent|' '
name|'def'
name|'index'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'pants'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'pants'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'controller'
op|'='
name|'Controller'
op|'('
op|')'
newline|'\n'
name|'resource'
op|'='
name|'wsgi'
op|'.'
name|'Resource'
op|'('
name|'controller'
op|')'
newline|'\n'
nl|'\n'
name|'env'
op|'='
op|'{'
nl|'\n'
string|"'wsgiorg.routing_args'"
op|':'
op|'['
name|'None'
op|','
op|'{'
nl|'\n'
string|"'controller'"
op|':'
name|'None'
op|','
nl|'\n'
string|"'format'"
op|':'
name|'None'
op|','
nl|'\n'
string|"'action'"
op|':'
string|"'update'"
op|','
nl|'\n'
string|"'id'"
op|':'
number|'12'
op|','
nl|'\n'
op|'}'
op|']'
op|','
nl|'\n'
op|'}'
newline|'\n'
nl|'\n'
name|'expected'
op|'='
op|'{'
string|"'action'"
op|':'
string|"'update'"
op|','
string|"'id'"
op|':'
number|'12'
op|'}'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'resource'
op|'.'
name|'get_action_args'
op|'('
name|'env'
op|')'
op|','
name|'expected'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_body_bad_content
dedent|''
name|'def'
name|'test_get_body_bad_content'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|Controller
indent|' '
name|'class'
name|'Controller'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
DECL|member|index
indent|' '
name|'def'
name|'index'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'pants'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'pants'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'controller'
op|'='
name|'Controller'
op|'('
op|')'
newline|'\n'
name|'resource'
op|'='
name|'wsgi'
op|'.'
name|'Resource'
op|'('
name|'controller'
op|')'
newline|'\n'
nl|'\n'
name|'request'
op|'='
name|'wsgi'
op|'.'
name|'Request'
op|'.'
name|'blank'
op|'('
string|"'/'"
op|','
name|'method'
op|'='
string|"'POST'"
op|')'
newline|'\n'
name|'request'
op|'.'
name|'headers'
op|'['
string|"'Content-Type'"
op|']'
op|'='
string|"'application/none'"
newline|'\n'
name|'request'
op|'.'
name|'body'
op|'='
string|"b'foo'"
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'InvalidContentType'
op|','
nl|'\n'
name|'resource'
op|'.'
name|'get_body'
op|','
name|'request'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_body_no_content_type
dedent|''
name|'def'
name|'test_get_body_no_content_type'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|Controller
indent|' '
name|'class'
name|'Controller'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
DECL|member|index
indent|' '
name|'def'
name|'index'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'pants'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'pants'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'controller'
op|'='
name|'Controller'
op|'('
op|')'
newline|'\n'
name|'resource'
op|'='
name|'wsgi'
op|'.'
name|'Resource'
op|'('
name|'controller'
op|')'
newline|'\n'
nl|'\n'
name|'request'
op|'='
name|'wsgi'
op|'.'
name|'Request'
op|'.'
name|'blank'
op|'('
string|"'/'"
op|','
name|'method'
op|'='
string|"'POST'"
op|')'
newline|'\n'
name|'request'
op|'.'
name|'body'
op|'='
string|"b'foo'"
newline|'\n'
nl|'\n'
name|'content_type'
op|','
name|'body'
op|'='
name|'resource'
op|'.'
name|'get_body'
op|'('
name|'request'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'content_type'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"b'foo'"
op|','
name|'body'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_body_no_content_body
dedent|''
name|'def'
name|'test_get_body_no_content_body'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|Controller
indent|' '
name|'class'
name|'Controller'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
DECL|member|index
indent|' '
name|'def'
name|'index'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'pants'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'pants'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'controller'
op|'='
name|'Controller'
op|'('
op|')'
newline|'\n'
name|'resource'
op|'='
name|'wsgi'
op|'.'
name|'Resource'
op|'('
name|'controller'
op|')'
newline|'\n'
nl|'\n'
name|'request'
op|'='
name|'wsgi'
op|'.'
name|'Request'
op|'.'
name|'blank'
op|'('
string|"'/'"
op|','
name|'method'
op|'='
string|"'POST'"
op|')'
newline|'\n'
name|'request'
op|'.'
name|'headers'
op|'['
string|"'Content-Type'"
op|']'
op|'='
string|"'application/json'"
newline|'\n'
name|'request'
op|'.'
name|'body'
op|'='
string|"b''"
newline|'\n'
nl|'\n'
name|'content_type'
op|','
name|'body'
op|'='
name|'resource'
op|'.'
name|'get_body'
op|'('
name|'request'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'application/json'"
op|','
name|'content_type'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"b''"
op|','
name|'body'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_body
dedent|''
name|'def'
name|'test_get_body'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|Controller
indent|' '
name|'class'
name|'Controller'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
DECL|member|index
indent|' '
name|'def'
name|'index'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'pants'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'pants'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'controller'
op|'='
name|'Controller'
op|'('
op|')'
newline|'\n'
name|'resource'
op|'='
name|'wsgi'
op|'.'
name|'Resource'
op|'('
name|'controller'
op|')'
newline|'\n'
nl|'\n'
name|'request'
op|'='
name|'wsgi'
op|'.'
name|'Request'
op|'.'
name|'blank'
op|'('
string|"'/'"
op|','
name|'method'
op|'='
string|"'POST'"
op|')'
newline|'\n'
name|'request'
op|'.'
name|'headers'
op|'['
string|"'Content-Type'"
op|']'
op|'='
string|"'application/json'"
newline|'\n'
name|'request'
op|'.'
name|'body'
op|'='
string|"b'foo'"
newline|'\n'
nl|'\n'
name|'content_type'
op|','
name|'body'
op|'='
name|'resource'
op|'.'
name|'get_body'
op|'('
name|'request'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'content_type'
op|','
string|"'application/json'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"b'foo'"
op|','
name|'body'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_request_id_with_dict_response_body
dedent|''
name|'def'
name|'test_get_request_id_with_dict_response_body'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|Controller
indent|' '
name|'class'
name|'Controller'
op|'('
name|'wsgi'
op|'.'
name|'Controller'
op|')'
op|':'
newline|'\n'
DECL|member|index
indent|' '
name|'def'
name|'index'
op|'('
name|'self'
op|','
name|'req'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
op|'{'
string|"'foo'"
op|':'
string|"'bar'"
op|'}'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'req'
op|'='
name|'fakes'
op|'.'
name|'HTTPRequest'
op|'.'
name|'blank'
op|'('
string|"'/tests'"
op|')'
newline|'\n'
name|'app'
op|'='
name|'fakes'
op|'.'
name|'TestRouter'
op|'('
name|'Controller'
op|'('
op|')'
op|')'
newline|'\n'
name|'response'
op|'='
name|'req'
op|'.'
name|'get_response'
op|'('
name|'app'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
string|"'nova.context'"
op|','
name|'req'
op|'.'
name|'environ'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'b\'{"foo": "bar"}\''
op|','
name|'response'
op|'.'
name|'body'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'response'
op|'.'
name|'status_int'
op|','
number|'200'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_no_request_id_with_str_response_body
dedent|''
name|'def'
name|'test_no_request_id_with_str_response_body'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|Controller
indent|' '
name|'class'
name|'Controller'
op|'('
name|'wsgi'
op|'.'
name|'Controller'
op|')'
op|':'
newline|'\n'
DECL|member|index
indent|' '
name|'def'
name|'index'
op|'('
name|'self'
op|','
name|'req'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
string|"'foo'"
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'req'
op|'='
name|'fakes'
op|'.'
name|'HTTPRequest'
op|'.'
name|'blank'
op|'('
string|"'/tests'"
op|')'
newline|'\n'
name|'app'
op|'='
name|'fakes'
op|'.'
name|'TestRouter'
op|'('
name|'Controller'
op|'('
op|')'
op|')'
newline|'\n'
name|'response'
op|'='
name|'req'
op|'.'
name|'get_response'
op|'('
name|'app'
op|')'
newline|'\n'
comment|'# NOTE(alaski): This test is really to ensure that a str response'
nl|'\n'
comment|"# doesn't error. Not having a request_id header is a side effect of"
nl|'\n'
comment|'# our wsgi setup, ideally it would be there.'
nl|'\n'
name|'expected_header'
op|'='
name|'self'
op|'.'
name|'get_req_id_header_name'
op|'('
name|'req'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'hasattr'
op|'('
name|'response'
op|'.'
name|'headers'
op|','
name|'expected_header'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"b'foo'"
op|','
name|'response'
op|'.'
name|'body'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'response'
op|'.'
name|'status_int'
op|','
number|'200'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_request_id_no_response_body
dedent|''
name|'def'
name|'test_get_request_id_no_response_body'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|Controller
indent|' '
name|'class'
name|'Controller'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
DECL|member|index
indent|' '
name|'def'
name|'index'
op|'('
name|'self'
op|','
name|'req'
op|')'
op|':'
newline|'\n'
indent|' '
name|'pass'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'req'
op|'='
name|'fakes'
op|'.'
name|'HTTPRequest'
op|'.'
name|'blank'
op|'('
string|"'/tests'"
op|')'
newline|'\n'
name|'app'
op|'='
name|'fakes'
op|'.'
name|'TestRouter'
op|'('
name|'Controller'
op|'('
op|')'
op|')'
newline|'\n'
name|'response'
op|'='
name|'req'
op|'.'
name|'get_response'
op|'('
name|'app'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
string|"'nova.context'"
op|','
name|'req'
op|'.'
name|'environ'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"b''"
op|','
name|'response'
op|'.'
name|'body'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'response'
op|'.'
name|'status_int'
op|','
number|'200'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_deserialize_default
dedent|''
name|'def'
name|'test_deserialize_default'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|Controller
indent|' '
name|'class'
name|'Controller'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
DECL|member|index
indent|' '
name|'def'
name|'index'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'pants'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'pants'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'controller'
op|'='
name|'Controller'
op|'('
op|')'
newline|'\n'
name|'resource'
op|'='
name|'wsgi'
op|'.'
name|'Resource'
op|'('
name|'controller'
op|')'
newline|'\n'
nl|'\n'
name|'obj'
op|'='
name|'resource'
op|'.'
name|'deserialize'
op|'('
string|'\'["foo"]\''
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'obj'
op|','
op|'{'
string|"'body'"
op|':'
op|'['
string|"'foo'"
op|']'
op|'}'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_register_actions
dedent|''
name|'def'
name|'test_register_actions'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|Controller
indent|' '
name|'class'
name|'Controller'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
DECL|member|index
indent|' '
name|'def'
name|'index'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'pants'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'pants'
newline|'\n'
nl|'\n'
DECL|class|ControllerExtended
dedent|''
dedent|''
name|'class'
name|'ControllerExtended'
op|'('
name|'wsgi'
op|'.'
name|'Controller'
op|')'
op|':'
newline|'\n'
indent|' '
op|'@'
name|'wsgi'
op|'.'
name|'action'
op|'('
string|"'fooAction'"
op|')'
newline|'\n'
DECL|member|_action_foo
name|'def'
name|'_action_foo'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'id'
op|','
name|'body'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'body'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'wsgi'
op|'.'
name|'action'
op|'('
string|"'barAction'"
op|')'
newline|'\n'
DECL|member|_action_bar
name|'def'
name|'_action_bar'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'id'
op|','
name|'body'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'body'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'controller'
op|'='
name|'Controller'
op|'('
op|')'
newline|'\n'
name|'resource'
op|'='
name|'wsgi'
op|'.'
name|'Resource'
op|'('
name|'controller'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'{'
op|'}'
op|','
name|'resource'
op|'.'
name|'wsgi_actions'
op|')'
newline|'\n'
nl|'\n'
name|'extended'
op|'='
name|'ControllerExtended'
op|'('
op|')'
newline|'\n'
name|'resource'
op|'.'
name|'register_actions'
op|'('
name|'extended'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'{'
nl|'\n'
string|"'fooAction'"
op|':'
name|'extended'
op|'.'
name|'_action_foo'
op|','
nl|'\n'
string|"'barAction'"
op|':'
name|'extended'
op|'.'
name|'_action_bar'
op|','
nl|'\n'
op|'}'
op|','
name|'resource'
op|'.'
name|'wsgi_actions'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_register_extensions
dedent|''
name|'def'
name|'test_register_extensions'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|Controller
indent|' '
name|'class'
name|'Controller'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
DECL|member|index
indent|' '
name|'def'
name|'index'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'pants'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'pants'
newline|'\n'
nl|'\n'
DECL|class|ControllerExtended
dedent|''
dedent|''
name|'class'
name|'ControllerExtended'
op|'('
name|'wsgi'
op|'.'
name|'Controller'
op|')'
op|':'
newline|'\n'
indent|' '
op|'@'
name|'wsgi'
op|'.'
name|'extends'
newline|'\n'
DECL|member|index
name|'def'
name|'index'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'resp_obj'
op|','
name|'pants'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'None'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'wsgi'
op|'.'
name|'extends'
op|'('
name|'action'
op|'='
string|"'fooAction'"
op|')'
newline|'\n'
DECL|member|_action_foo
name|'def'
name|'_action_foo'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'resp'
op|','
name|'id'
op|','
name|'body'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'None'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'controller'
op|'='
name|'Controller'
op|'('
op|')'
newline|'\n'
name|'resource'
op|'='
name|'wsgi'
op|'.'
name|'Resource'
op|'('
name|'controller'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'{'
op|'}'
op|','
name|'resource'
op|'.'
name|'wsgi_extensions'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'{'
op|'}'
op|','
name|'resource'
op|'.'
name|'wsgi_action_extensions'
op|')'
newline|'\n'
nl|'\n'
name|'extended'
op|'='
name|'ControllerExtended'
op|'('
op|')'
newline|'\n'
name|'resource'
op|'.'
name|'register_extensions'
op|'('
name|'extended'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'{'
string|"'index'"
op|':'
op|'['
name|'extended'
op|'.'
name|'index'
op|']'
op|'}'
op|','
name|'resource'
op|'.'
name|'wsgi_extensions'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'{'
string|"'fooAction'"
op|':'
op|'['
name|'extended'
op|'.'
name|'_action_foo'
op|']'
op|'}'
op|','
nl|'\n'
name|'resource'
op|'.'
name|'wsgi_action_extensions'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_method_extensions
dedent|''
name|'def'
name|'test_get_method_extensions'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|Controller
indent|' '
name|'class'
name|'Controller'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
DECL|member|index
indent|' '
name|'def'
name|'index'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'pants'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'pants'
newline|'\n'
nl|'\n'
DECL|class|ControllerExtended
dedent|''
dedent|''
name|'class'
name|'ControllerExtended'
op|'('
name|'wsgi'
op|'.'
name|'Controller'
op|')'
op|':'
newline|'\n'
indent|' '
op|'@'
name|'wsgi'
op|'.'
name|'extends'
newline|'\n'
DECL|member|index
name|'def'
name|'index'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'resp_obj'
op|','
name|'pants'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'None'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'controller'
op|'='
name|'Controller'
op|'('
op|')'
newline|'\n'
name|'extended'
op|'='
name|'ControllerExtended'
op|'('
op|')'
newline|'\n'
name|'resource'
op|'='
name|'wsgi'
op|'.'
name|'Resource'
op|'('
name|'controller'
op|')'
newline|'\n'
name|'resource'
op|'.'
name|'register_extensions'
op|'('
name|'extended'
op|')'
newline|'\n'
name|'method'
op|','
name|'extensions'
op|'='
name|'resource'
op|'.'
name|'get_method'
op|'('
name|'None'
op|','
string|"'index'"
op|','
name|'None'
op|','
string|"''"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'method'
op|','
name|'controller'
op|'.'
name|'index'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'extensions'
op|','
op|'['
name|'extended'
op|'.'
name|'index'
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_method_action_extensions
dedent|''
name|'def'
name|'test_get_method_action_extensions'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|Controller
indent|' '
name|'class'
name|'Controller'
op|'('
name|'wsgi'
op|'.'
name|'Controller'
op|')'
op|':'
newline|'\n'
DECL|member|index
indent|' '
name|'def'
name|'index'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'pants'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'pants'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'wsgi'
op|'.'
name|'action'
op|'('
string|"'fooAction'"
op|')'
newline|'\n'
DECL|member|_action_foo
name|'def'
name|'_action_foo'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'id'
op|','
name|'body'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'body'
newline|'\n'
nl|'\n'
DECL|class|ControllerExtended
dedent|''
dedent|''
name|'class'
name|'ControllerExtended'
op|'('
name|'wsgi'
op|'.'
name|'Controller'
op|')'
op|':'
newline|'\n'
indent|' '
op|'@'
name|'wsgi'
op|'.'
name|'extends'
op|'('
name|'action'
op|'='
string|"'fooAction'"
op|')'
newline|'\n'
DECL|member|_action_foo
name|'def'
name|'_action_foo'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'resp_obj'
op|','
name|'id'
op|','
name|'body'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'None'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'controller'
op|'='
name|'Controller'
op|'('
op|')'
newline|'\n'
name|'extended'
op|'='
name|'ControllerExtended'
op|'('
op|')'
newline|'\n'
name|'resource'
op|'='
name|'wsgi'
op|'.'
name|'Resource'
op|'('
name|'controller'
op|')'
newline|'\n'
name|'resource'
op|'.'
name|'register_extensions'
op|'('
name|'extended'
op|')'
newline|'\n'
name|'method'
op|','
name|'extensions'
op|'='
name|'resource'
op|'.'
name|'get_method'
op|'('
name|'None'
op|','
string|"'action'"
op|','
nl|'\n'
string|"'application/json'"
op|','
nl|'\n'
string|'\'{"fooAction": true}\''
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'method'
op|','
name|'controller'
op|'.'
name|'_action_foo'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'extensions'
op|','
op|'['
name|'extended'
op|'.'
name|'_action_foo'
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_method_action_whitelist_extensions
dedent|''
name|'def'
name|'test_get_method_action_whitelist_extensions'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|Controller
indent|' '
name|'class'
name|'Controller'
op|'('
name|'wsgi'
op|'.'
name|'Controller'
op|')'
op|':'
newline|'\n'
DECL|member|index
indent|' '
name|'def'
name|'index'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'pants'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'pants'
newline|'\n'
nl|'\n'
DECL|class|ControllerExtended
dedent|''
dedent|''
name|'class'
name|'ControllerExtended'
op|'('
name|'wsgi'
op|'.'
name|'Controller'
op|')'
op|':'
newline|'\n'
indent|' '
op|'@'
name|'wsgi'
op|'.'
name|'action'
op|'('
string|"'create'"
op|')'
newline|'\n'
DECL|member|_create
name|'def'
name|'_create'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'body'
op|')'
op|':'
newline|'\n'
indent|' '
name|'pass'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'wsgi'
op|'.'
name|'action'
op|'('
string|"'delete'"
op|')'
newline|'\n'
DECL|member|_delete
name|'def'
name|'_delete'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'id'
op|')'
op|':'
newline|'\n'
indent|' '
name|'pass'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'controller'
op|'='
name|'Controller'
op|'('
op|')'
newline|'\n'
name|'extended'
op|'='
name|'ControllerExtended'
op|'('
op|')'
newline|'\n'
name|'resource'
op|'='
name|'wsgi'
op|'.'
name|'Resource'
op|'('
name|'controller'
op|')'
newline|'\n'
name|'resource'
op|'.'
name|'register_actions'
op|'('
name|'extended'
op|')'
newline|'\n'
nl|'\n'
name|'method'
op|','
name|'extensions'
op|'='
name|'resource'
op|'.'
name|'get_method'
op|'('
name|'None'
op|','
string|"'create'"
op|','
nl|'\n'
string|"'application/json'"
op|','
nl|'\n'
string|'\'{"create": true}\''
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'method'
op|','
name|'extended'
op|'.'
name|'_create'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'extensions'
op|','
op|'['
op|']'
op|')'
newline|'\n'
nl|'\n'
name|'method'
op|','
name|'extensions'
op|'='
name|'resource'
op|'.'
name|'get_method'
op|'('
name|'None'
op|','
string|"'delete'"
op|','
name|'None'
op|','
name|'None'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'method'
op|','
name|'extended'
op|'.'
name|'_delete'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'extensions'
op|','
op|'['
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_pre_process_extensions_regular
dedent|''
name|'def'
name|'test_pre_process_extensions_regular'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|Controller
indent|' '
name|'class'
name|'Controller'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
DECL|member|index
indent|' '
name|'def'
name|'index'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'pants'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'pants'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'controller'
op|'='
name|'Controller'
op|'('
op|')'
newline|'\n'
name|'resource'
op|'='
name|'wsgi'
op|'.'
name|'Resource'
op|'('
name|'controller'
op|')'
newline|'\n'
nl|'\n'
name|'called'
op|'='
op|'['
op|']'
newline|'\n'
nl|'\n'
DECL|function|extension1
name|'def'
name|'extension1'
op|'('
name|'req'
op|','
name|'resp_obj'
op|')'
op|':'
newline|'\n'
indent|' '
name|'called'
op|'.'
name|'append'
op|'('
number|'1'
op|')'
newline|'\n'
name|'return'
name|'None'
newline|'\n'
nl|'\n'
DECL|function|extension2
dedent|''
name|'def'
name|'extension2'
op|'('
name|'req'
op|','
name|'resp_obj'
op|')'
op|':'
newline|'\n'
indent|' '
name|'called'
op|'.'
name|'append'
op|'('
number|'2'
op|')'
newline|'\n'
name|'return'
name|'None'
newline|'\n'
nl|'\n'
dedent|''
name|'extensions'
op|'='
op|'['
name|'extension1'
op|','
name|'extension2'
op|']'
newline|'\n'
name|'response'
op|','
name|'post'
op|'='
name|'resource'
op|'.'
name|'pre_process_extensions'
op|'('
name|'extensions'
op|','
name|'None'
op|','
op|'{'
op|'}'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'called'
op|','
op|'['
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'response'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'list'
op|'('
name|'post'
op|')'
op|','
op|'['
name|'extension2'
op|','
name|'extension1'
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_pre_process_extensions_generator
dedent|''
name|'def'
name|'test_pre_process_extensions_generator'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|Controller
indent|' '
name|'class'
name|'Controller'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
DECL|member|index
indent|' '
name|'def'
name|'index'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'pants'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'pants'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'controller'
op|'='
name|'Controller'
op|'('
op|')'
newline|'\n'
name|'resource'
op|'='
name|'wsgi'
op|'.'
name|'Resource'
op|'('
name|'controller'
op|')'
newline|'\n'
nl|'\n'
name|'called'
op|'='
op|'['
op|']'
newline|'\n'
nl|'\n'
DECL|function|extension1
name|'def'
name|'extension1'
op|'('
name|'req'
op|')'
op|':'
newline|'\n'
indent|' '
name|'called'
op|'.'
name|'append'
op|'('
string|"'pre1'"
op|')'
newline|'\n'
name|'yield'
newline|'\n'
name|'called'
op|'.'
name|'append'
op|'('
string|"'post1'"
op|')'
newline|'\n'
nl|'\n'
DECL|function|extension2
dedent|''
name|'def'
name|'extension2'
op|'('
name|'req'
op|')'
op|':'
newline|'\n'
indent|' '
name|'called'
op|'.'
name|'append'
op|'('
string|"'pre2'"
op|')'
newline|'\n'
name|'yield'
newline|'\n'
name|'called'
op|'.'
name|'append'
op|'('
string|"'post2'"
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'extensions'
op|'='
op|'['
name|'extension1'
op|','
name|'extension2'
op|']'
newline|'\n'
name|'response'
op|','
name|'post'
op|'='
name|'resource'
op|'.'
name|'pre_process_extensions'
op|'('
name|'extensions'
op|','
name|'None'
op|','
op|'{'
op|'}'
op|')'
newline|'\n'
name|'post'
op|'='
name|'list'
op|'('
name|'post'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'called'
op|','
op|'['
string|"'pre1'"
op|','
string|"'pre2'"
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'response'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'len'
op|'('
name|'post'
op|')'
op|','
number|'2'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'inspect'
op|'.'
name|'isgenerator'
op|'('
name|'post'
op|'['
number|'0'
op|']'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'inspect'
op|'.'
name|'isgenerator'
op|'('
name|'post'
op|'['
number|'1'
op|']'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'for'
name|'gen'
name|'in'
name|'post'
op|':'
newline|'\n'
indent|' '
name|'try'
op|':'
newline|'\n'
indent|' '
name|'gen'
op|'.'
name|'send'
op|'('
name|'None'
op|')'
newline|'\n'
dedent|''
name|'except'
name|'StopIteration'
op|':'
newline|'\n'
indent|' '
name|'continue'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'called'
op|','
op|'['
string|"'pre1'"
op|','
string|"'pre2'"
op|','
string|"'post2'"
op|','
string|"'post1'"
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_pre_process_extensions_generator_response
dedent|''
name|'def'
name|'test_pre_process_extensions_generator_response'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|Controller
indent|' '
name|'class'
name|'Controller'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
DECL|member|index
indent|' '
name|'def'
name|'index'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'pants'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'pants'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'controller'
op|'='
name|'Controller'
op|'('
op|')'
newline|'\n'
name|'resource'
op|'='
name|'wsgi'
op|'.'
name|'Resource'
op|'('
name|'controller'
op|')'
newline|'\n'
nl|'\n'
name|'called'
op|'='
op|'['
op|']'
newline|'\n'
nl|'\n'
DECL|function|extension1
name|'def'
name|'extension1'
op|'('
name|'req'
op|')'
op|':'
newline|'\n'
indent|' '
name|'called'
op|'.'
name|'append'
op|'('
string|"'pre1'"
op|')'
newline|'\n'
name|'yield'
string|"'foo'"
newline|'\n'
nl|'\n'
DECL|function|extension2
dedent|''
name|'def'
name|'extension2'
op|'('
name|'req'
op|')'
op|':'
newline|'\n'
indent|' '
name|'called'
op|'.'
name|'append'
op|'('
string|"'pre2'"
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'extensions'
op|'='
op|'['
name|'extension1'
op|','
name|'extension2'
op|']'
newline|'\n'
name|'response'
op|','
name|'post'
op|'='
name|'resource'
op|'.'
name|'pre_process_extensions'
op|'('
name|'extensions'
op|','
name|'None'
op|','
op|'{'
op|'}'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'called'
op|','
op|'['
string|"'pre1'"
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'response'
op|','
string|"'foo'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'post'
op|','
op|'['
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_post_process_extensions_regular
dedent|''
name|'def'
name|'test_post_process_extensions_regular'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|Controller
indent|' '
name|'class'
name|'Controller'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
DECL|member|index
indent|' '
name|'def'
name|'index'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'pants'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'pants'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'controller'
op|'='
name|'Controller'
op|'('
op|')'
newline|'\n'
name|'resource'
op|'='
name|'wsgi'
op|'.'
name|'Resource'
op|'('
name|'controller'
op|')'
newline|'\n'
nl|'\n'
name|'called'
op|'='
op|'['
op|']'
newline|'\n'
nl|'\n'
DECL|function|extension1
name|'def'
name|'extension1'
op|'('
name|'req'
op|','
name|'resp_obj'
op|')'
op|':'
newline|'\n'
indent|' '
name|'called'
op|'.'
name|'append'
op|'('
number|'1'
op|')'
newline|'\n'
name|'return'
name|'None'
newline|'\n'
nl|'\n'
DECL|function|extension2
dedent|''
name|'def'
name|'extension2'
op|'('
name|'req'
op|','
name|'resp_obj'
op|')'
op|':'
newline|'\n'
indent|' '
name|'called'
op|'.'
name|'append'
op|'('
number|'2'
op|')'
newline|'\n'
name|'return'
name|'None'
newline|'\n'
nl|'\n'
dedent|''
name|'response'
op|'='
name|'resource'
op|'.'
name|'post_process_extensions'
op|'('
op|'['
name|'extension2'
op|','
name|'extension1'
op|']'
op|','
nl|'\n'
name|'None'
op|','
name|'None'
op|','
op|'{'
op|'}'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'called'
op|','
op|'['
number|'2'
op|','
number|'1'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'response'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_post_process_extensions_regular_response
dedent|''
name|'def'
name|'test_post_process_extensions_regular_response'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|Controller
indent|' '
name|'class'
name|'Controller'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
DECL|member|index
indent|' '
name|'def'
name|'index'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'pants'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'pants'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'controller'
op|'='
name|'Controller'
op|'('
op|')'
newline|'\n'
name|'resource'
op|'='
name|'wsgi'
op|'.'
name|'Resource'
op|'('
name|'controller'
op|')'
newline|'\n'
nl|'\n'
name|'called'
op|'='
op|'['
op|']'
newline|'\n'
nl|'\n'
DECL|function|extension1
name|'def'
name|'extension1'
op|'('
name|'req'
op|','
name|'resp_obj'
op|')'
op|':'
newline|'\n'
indent|' '
name|'called'
op|'.'
name|'append'
op|'('
number|'1'
op|')'
newline|'\n'
name|'return'
name|'None'
newline|'\n'
nl|'\n'
DECL|function|extension2
dedent|''
name|'def'
name|'extension2'
op|'('
name|'req'
op|','
name|'resp_obj'
op|')'
op|':'
newline|'\n'
indent|' '
name|'called'
op|'.'
name|'append'
op|'('
number|'2'
op|')'
newline|'\n'
name|'return'
string|"'foo'"
newline|'\n'
nl|'\n'
dedent|''
name|'response'
op|'='
name|'resource'
op|'.'
name|'post_process_extensions'
op|'('
op|'['
name|'extension2'
op|','
name|'extension1'
op|']'
op|','
nl|'\n'
name|'None'
op|','
name|'None'
op|','
op|'{'
op|'}'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'called'
op|','
op|'['
number|'2'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'response'
op|','
string|"'foo'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_post_process_extensions_generator
dedent|''
name|'def'
name|'test_post_process_extensions_generator'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|Controller
indent|' '
name|'class'
name|'Controller'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
DECL|member|index
indent|' '
name|'def'
name|'index'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'pants'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'pants'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'controller'
op|'='
name|'Controller'
op|'('
op|')'
newline|'\n'
name|'resource'
op|'='
name|'wsgi'
op|'.'
name|'Resource'
op|'('
name|'controller'
op|')'
newline|'\n'
nl|'\n'
name|'called'
op|'='
op|'['
op|']'
newline|'\n'
nl|'\n'
DECL|function|extension1
name|'def'
name|'extension1'
op|'('
name|'req'
op|')'
op|':'
newline|'\n'
indent|' '
name|'yield'
newline|'\n'
name|'called'
op|'.'
name|'append'
op|'('
number|'1'
op|')'
newline|'\n'
nl|'\n'
DECL|function|extension2
dedent|''
name|'def'
name|'extension2'
op|'('
name|'req'
op|')'
op|':'
newline|'\n'
indent|' '
name|'yield'
newline|'\n'
name|'called'
op|'.'
name|'append'
op|'('
number|'2'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'ext1'
op|'='
name|'extension1'
op|'('
name|'None'
op|')'
newline|'\n'
name|'next'
op|'('
name|'ext1'
op|')'
newline|'\n'
name|'ext2'
op|'='
name|'extension2'
op|'('
name|'None'
op|')'
newline|'\n'
name|'next'
op|'('
name|'ext2'
op|')'
newline|'\n'
nl|'\n'
name|'response'
op|'='
name|'resource'
op|'.'
name|'post_process_extensions'
op|'('
op|'['
name|'ext2'
op|','
name|'ext1'
op|']'
op|','
nl|'\n'
name|'None'
op|','
name|'None'
op|','
op|'{'
op|'}'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'called'
op|','
op|'['
number|'2'
op|','
number|'1'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'response'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_post_process_extensions_generator_response
dedent|''
name|'def'
name|'test_post_process_extensions_generator_response'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|Controller
indent|' '
name|'class'
name|'Controller'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
DECL|member|index
indent|' '
name|'def'
name|'index'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'pants'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'pants'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'controller'
op|'='
name|'Controller'
op|'('
op|')'
newline|'\n'
name|'resource'
op|'='
name|'wsgi'
op|'.'
name|'Resource'
op|'('
name|'controller'
op|')'
newline|'\n'
nl|'\n'
name|'called'
op|'='
op|'['
op|']'
newline|'\n'
nl|'\n'
DECL|function|extension1
name|'def'
name|'extension1'
op|'('
name|'req'
op|')'
op|':'
newline|'\n'
indent|' '
name|'yield'
newline|'\n'
name|'called'
op|'.'
name|'append'
op|'('
number|'1'
op|')'
newline|'\n'
nl|'\n'
DECL|function|extension2
dedent|''
name|'def'
name|'extension2'
op|'('
name|'req'
op|')'
op|':'
newline|'\n'
indent|' '
name|'yield'
newline|'\n'
name|'called'
op|'.'
name|'append'
op|'('
number|'2'
op|')'
newline|'\n'
name|'yield'
string|"'foo'"
newline|'\n'
nl|'\n'
dedent|''
name|'ext1'
op|'='
name|'extension1'
op|'('
name|'None'
op|')'
newline|'\n'
name|'next'
op|'('
name|'ext1'
op|')'
newline|'\n'
name|'ext2'
op|'='
name|'extension2'
op|'('
name|'None'
op|')'
newline|'\n'
name|'next'
op|'('
name|'ext2'
op|')'
newline|'\n'
nl|'\n'
name|'response'
op|'='
name|'resource'
op|'.'
name|'post_process_extensions'
op|'('
op|'['
name|'ext2'
op|','
name|'ext1'
op|']'
op|','
nl|'\n'
name|'None'
op|','
name|'None'
op|','
op|'{'
op|'}'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'called'
op|','
op|'['
number|'2'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'response'
op|','
string|"'foo'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_resource_exception_handler_type_error
dedent|''
name|'def'
name|'test_resource_exception_handler_type_error'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# A TypeError should be translated to a Fault/HTTP 400.'
nl|'\n'
DECL|function|foo
indent|' '
name|'def'
name|'foo'
op|'('
name|'a'
op|','
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'a'
newline|'\n'
nl|'\n'
dedent|''
name|'try'
op|':'
newline|'\n'
indent|' '
name|'with'
name|'wsgi'
op|'.'
name|'ResourceExceptionHandler'
op|'('
op|')'
op|':'
newline|'\n'
indent|' '
name|'foo'
op|'('
op|')'
comment|'# generate a TypeError'
newline|'\n'
dedent|''
name|'self'
op|'.'
name|'fail'
op|'('
string|'"Should have raised a Fault (HTTP 400)"'
op|')'
newline|'\n'
dedent|''
name|'except'
name|'wsgi'
op|'.'
name|'Fault'
name|'as'
name|'fault'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'400'
op|','
name|'fault'
op|'.'
name|'status_int'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_resource_headers_are_utf8
dedent|''
dedent|''
name|'def'
name|'test_resource_headers_are_utf8'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'resp'
op|'='
name|'webob'
op|'.'
name|'Response'
op|'('
name|'status_int'
op|'='
number|'202'
op|')'
newline|'\n'
name|'resp'
op|'.'
name|'headers'
op|'['
string|"'x-header1'"
op|']'
op|'='
number|'1'
newline|'\n'
name|'resp'
op|'.'
name|'headers'
op|'['
string|"'x-header2'"
op|']'
op|'='
string|"u'header2'"
newline|'\n'
name|'resp'
op|'.'
name|'headers'
op|'['
string|"'x-header3'"
op|']'
op|'='
string|"u'header3'"
newline|'\n'
nl|'\n'
DECL|class|Controller
name|'class'
name|'Controller'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
DECL|member|index
indent|' '
name|'def'
name|'index'
op|'('
name|'self'
op|','
name|'req'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'resp'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'req'
op|'='
name|'webob'
op|'.'
name|'Request'
op|'.'
name|'blank'
op|'('
string|"'/tests'"
op|')'
newline|'\n'
name|'app'
op|'='
name|'fakes'
op|'.'
name|'TestRouter'
op|'('
name|'Controller'
op|'('
op|')'
op|')'
newline|'\n'
name|'response'
op|'='
name|'req'
op|'.'
name|'get_response'
op|'('
name|'app'
op|')'
newline|'\n'
name|'for'
name|'val'
name|'in'
name|'six'
op|'.'
name|'itervalues'
op|'('
name|'response'
op|'.'
name|'headers'
op|')'
op|':'
newline|'\n'
comment|'# All headers must be utf8'
nl|'\n'
indent|' '
name|'self'
op|'.'
name|'assertThat'
op|'('
name|'val'
op|','
name|'matchers'
op|'.'
name|'EncodedByUTF8'
op|'('
op|')'
op|')'
newline|'\n'
dedent|''
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"b'1'"
op|','
name|'response'
op|'.'
name|'headers'
op|'['
string|"'x-header1'"
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"b'header2'"
op|','
name|'response'
op|'.'
name|'headers'
op|'['
string|"'x-header2'"
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"b'header3'"
op|','
name|'response'
op|'.'
name|'headers'
op|'['
string|"'x-header3'"
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_resource_valid_utf8_body
dedent|''
name|'def'
name|'test_resource_valid_utf8_body'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|Controller
indent|' '
name|'class'
name|'Controller'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
DECL|member|update
indent|' '
name|'def'
name|'update'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'id'
op|','
name|'body'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'body'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'req'
op|'='
name|'webob'
op|'.'
name|'Request'
op|'.'
name|'blank'
op|'('
string|"'/tests/test_id'"
op|','
name|'method'
op|'='
string|'"PUT"'
op|')'
newline|'\n'
name|'body'
op|'='
string|'b""" {"name": "\\xe6\\xa6\\x82\\xe5\\xbf\\xb5" } """'
newline|'\n'
name|'expected_body'
op|'='
string|'b\'{"name": "\\\\u6982\\\\u5ff5"}\''
newline|'\n'
name|'req'
op|'.'
name|'body'
op|'='
name|'body'
newline|'\n'
name|'req'
op|'.'
name|'headers'
op|'['
string|"'Content-Type'"
op|']'
op|'='
string|"'application/json'"
newline|'\n'
name|'app'
op|'='
name|'fakes'
op|'.'
name|'TestRouter'
op|'('
name|'Controller'
op|'('
op|')'
op|')'
newline|'\n'
name|'response'
op|'='
name|'req'
op|'.'
name|'get_response'
op|'('
name|'app'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'response'
op|'.'
name|'body'
op|','
name|'expected_body'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'response'
op|'.'
name|'status_int'
op|','
number|'200'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_resource_invalid_utf8
dedent|''
name|'def'
name|'test_resource_invalid_utf8'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|Controller
indent|' '
name|'class'
name|'Controller'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
DECL|member|update
indent|' '
name|'def'
name|'update'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'id'
op|','
name|'body'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'body'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'req'
op|'='
name|'webob'
op|'.'
name|'Request'
op|'.'
name|'blank'
op|'('
string|"'/tests/test_id'"
op|','
name|'method'
op|'='
string|'"PUT"'
op|')'
newline|'\n'
name|'body'
op|'='
string|'b""" {"name": "\\xf0\\x28\\x8c\\x28" } """'
newline|'\n'
name|'req'
op|'.'
name|'body'
op|'='
name|'body'
newline|'\n'
name|'req'
op|'.'
name|'headers'
op|'['
string|"'Content-Type'"
op|']'
op|'='
string|"'application/json'"
newline|'\n'
name|'app'
op|'='
name|'fakes'
op|'.'
name|'TestRouter'
op|'('
name|'Controller'
op|'('
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'UnicodeDecodeError'
op|','
name|'req'
op|'.'
name|'get_response'
op|','
name|'app'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|ResponseObjectTest
dedent|''
dedent|''
name|'class'
name|'ResponseObjectTest'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
DECL|member|test_default_code
indent|' '
name|'def'
name|'test_default_code'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'robj'
op|'='
name|'wsgi'
op|'.'
name|'ResponseObject'
op|'('
op|'{'
op|'}'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'robj'
op|'.'
name|'code'
op|','
number|'200'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_modified_code
dedent|''
name|'def'
name|'test_modified_code'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'robj'
op|'='
name|'wsgi'
op|'.'
name|'ResponseObject'
op|'('
op|'{'
op|'}'
op|')'
newline|'\n'
name|'robj'
op|'.'
name|'_default_code'
op|'='
number|'202'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'robj'
op|'.'
name|'code'
op|','
number|'202'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_override_default_code
dedent|''
name|'def'
name|'test_override_default_code'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'robj'
op|'='
name|'wsgi'
op|'.'
name|'ResponseObject'
op|'('
op|'{'
op|'}'
op|','
name|'code'
op|'='
number|'404'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'robj'
op|'.'
name|'code'
op|','
number|'404'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_override_modified_code
dedent|''
name|'def'
name|'test_override_modified_code'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'robj'
op|'='
name|'wsgi'
op|'.'
name|'ResponseObject'
op|'('
op|'{'
op|'}'
op|','
name|'code'
op|'='
number|'404'
op|')'
newline|'\n'
name|'robj'
op|'.'
name|'_default_code'
op|'='
number|'202'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'robj'
op|'.'
name|'code'
op|','
number|'404'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_set_header
dedent|''
name|'def'
name|'test_set_header'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'robj'
op|'='
name|'wsgi'
op|'.'
name|'ResponseObject'
op|'('
op|'{'
op|'}'
op|')'
newline|'\n'
name|'robj'
op|'['
string|"'Header'"
op|']'
op|'='
string|"'foo'"
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'robj'
op|'.'
name|'headers'
op|','
op|'{'
string|"'header'"
op|':'
string|"'foo'"
op|'}'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_header
dedent|''
name|'def'
name|'test_get_header'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'robj'
op|'='
name|'wsgi'
op|'.'
name|'ResponseObject'
op|'('
op|'{'
op|'}'
op|')'
newline|'\n'
name|'robj'
op|'['
string|"'Header'"
op|']'
op|'='
string|"'foo'"
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'robj'
op|'['
string|"'hEADER'"
op|']'
op|','
string|"'foo'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_del_header
dedent|''
name|'def'
name|'test_del_header'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'robj'
op|'='
name|'wsgi'
op|'.'
name|'ResponseObject'
op|'('
op|'{'
op|'}'
op|')'
newline|'\n'
name|'robj'
op|'['
string|"'Header'"
op|']'
op|'='
string|"'foo'"
newline|'\n'
name|'del'
name|'robj'
op|'['
string|"'hEADER'"
op|']'
newline|'\n'
name|'self'
op|'.'
name|'assertNotIn'
op|'('
string|"'header'"
op|','
name|'robj'
op|'.'
name|'headers'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_header_isolation
dedent|''
name|'def'
name|'test_header_isolation'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'robj'
op|'='
name|'wsgi'
op|'.'
name|'ResponseObject'
op|'('
op|'{'
op|'}'
op|')'
newline|'\n'
name|'robj'
op|'['
string|"'Header'"
op|']'
op|'='
string|"'foo'"
newline|'\n'
name|'hdrs'
op|'='
name|'robj'
op|'.'
name|'headers'
newline|'\n'
name|'hdrs'
op|'['
string|"'hEADER'"
op|']'
op|'='
string|"'bar'"
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'robj'
op|'['
string|"'hEADER'"
op|']'
op|','
string|"'foo'"
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|ValidBodyTest
dedent|''
dedent|''
name|'class'
name|'ValidBodyTest'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
nl|'\n'
DECL|member|setUp
indent|' '
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'ValidBodyTest'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'controller'
op|'='
name|'wsgi'
op|'.'
name|'Controller'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_is_valid_body
dedent|''
name|'def'
name|'test_is_valid_body'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'body'
op|'='
op|'{'
string|"'foo'"
op|':'
op|'{'
op|'}'
op|'}'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'self'
op|'.'
name|'controller'
op|'.'
name|'is_valid_body'
op|'('
name|'body'
op|','
string|"'foo'"
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_is_valid_body_none
dedent|''
name|'def'
name|'test_is_valid_body_none'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'wsgi'
op|'.'
name|'Resource'
op|'('
name|'controller'
op|'='
name|'None'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'self'
op|'.'
name|'controller'
op|'.'
name|'is_valid_body'
op|'('
name|'None'
op|','
string|"'foo'"
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_is_valid_body_empty
dedent|''
name|'def'
name|'test_is_valid_body_empty'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'wsgi'
op|'.'
name|'Resource'
op|'('
name|'controller'
op|'='
name|'None'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'self'
op|'.'
name|'controller'
op|'.'
name|'is_valid_body'
op|'('
op|'{'
op|'}'
op|','
string|"'foo'"
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_is_valid_body_no_entity
dedent|''
name|'def'
name|'test_is_valid_body_no_entity'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'wsgi'
op|'.'
name|'Resource'
op|'('
name|'controller'
op|'='
name|'None'
op|')'
newline|'\n'
name|'body'
op|'='
op|'{'
string|"'bar'"
op|':'
op|'{'
op|'}'
op|'}'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'self'
op|'.'
name|'controller'
op|'.'
name|'is_valid_body'
op|'('
name|'body'
op|','
string|"'foo'"
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_is_valid_body_malformed_entity
dedent|''
name|'def'
name|'test_is_valid_body_malformed_entity'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'wsgi'
op|'.'
name|'Resource'
op|'('
name|'controller'
op|'='
name|'None'
op|')'
newline|'\n'
name|'body'
op|'='
op|'{'
string|"'foo'"
op|':'
string|"'bar'"
op|'}'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'self'
op|'.'
name|'controller'
op|'.'
name|'is_valid_body'
op|'('
name|'body'
op|','
string|"'foo'"
op|')'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|TestController
dedent|''
dedent|''
name|'class'
name|'TestController'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
DECL|member|test_check_for_versions_intersection_negative
indent|' '
name|'def'
name|'test_check_for_versions_intersection_negative'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'func_list'
op|'='
op|'['
name|'versioned_method'
op|'.'
name|'VersionedMethod'
op|'('
string|"'foo'"
op|','
nl|'\n'
name|'api_version'
op|'.'
name|'APIVersionRequest'
op|'('
nl|'\n'
string|"'2.1'"
op|')'
op|','
nl|'\n'
name|'api_version'
op|'.'
name|'APIVersionRequest'
op|'('
nl|'\n'
string|"'2.4'"
op|')'
op|','
nl|'\n'
name|'None'
op|')'
op|','
nl|'\n'
name|'versioned_method'
op|'.'
name|'VersionedMethod'
op|'('
string|"'foo'"
op|','
nl|'\n'
name|'api_version'
op|'.'
name|'APIVersionRequest'
op|'('
nl|'\n'
string|"'2.11'"
op|')'
op|','
nl|'\n'
name|'api_version'
op|'.'
name|'APIVersionRequest'
op|'('
nl|'\n'
string|"'3.1'"
op|')'
op|','
nl|'\n'
name|'None'
op|')'
op|','
nl|'\n'
name|'versioned_method'
op|'.'
name|'VersionedMethod'
op|'('
string|"'foo'"
op|','
nl|'\n'
name|'api_version'
op|'.'
name|'APIVersionRequest'
op|'('
nl|'\n'
string|"'2.8'"
op|')'
op|','
nl|'\n'
name|'api_version'
op|'.'
name|'APIVersionRequest'
op|'('
nl|'\n'
string|"'2.9'"
op|')'
op|','
nl|'\n'
name|'None'
op|')'
op|','
nl|'\n'
op|']'
newline|'\n'
nl|'\n'
name|'result'
op|'='
name|'wsgi'
op|'.'
name|'Controller'
op|'.'
name|'check_for_versions_intersection'
op|'('
name|'func_list'
op|'='
nl|'\n'
name|'func_list'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'result'
op|')'
newline|'\n'
nl|'\n'
name|'func_list'
op|'='
op|'['
name|'versioned_method'
op|'.'
name|'VersionedMethod'
op|'('
string|"'foo'"
op|','
nl|'\n'
name|'api_version'
op|'.'
name|'APIVersionRequest'
op|'('
nl|'\n'
string|"'2.12'"
op|')'
op|','
nl|'\n'
name|'api_version'
op|'.'
name|'APIVersionRequest'
op|'('
nl|'\n'
string|"'2.14'"
op|')'
op|','
nl|'\n'
name|'None'
op|')'
op|','
nl|'\n'
name|'versioned_method'
op|'.'
name|'VersionedMethod'
op|'('
string|"'foo'"
op|','
nl|'\n'
name|'api_version'
op|'.'
name|'APIVersionRequest'
op|'('
nl|'\n'
string|"'3.0'"
op|')'
op|','
nl|'\n'
name|'api_version'
op|'.'
name|'APIVersionRequest'
op|'('
nl|'\n'
string|"'3.4'"
op|')'
op|','
nl|'\n'
name|'None'
op|')'
nl|'\n'
op|']'
newline|'\n'
nl|'\n'
name|'result'
op|'='
name|'wsgi'
op|'.'
name|'Controller'
op|'.'
name|'check_for_versions_intersection'
op|'('
name|'func_list'
op|'='
nl|'\n'
name|'func_list'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'result'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_check_for_versions_intersection_positive
dedent|''
name|'def'
name|'test_check_for_versions_intersection_positive'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'func_list'
op|'='
op|'['
name|'versioned_method'
op|'.'
name|'VersionedMethod'
op|'('
string|"'foo'"
op|','
nl|'\n'
name|'api_version'
op|'.'
name|'APIVersionRequest'
op|'('
nl|'\n'
string|"'2.1'"
op|')'
op|','
nl|'\n'
name|'api_version'
op|'.'
name|'APIVersionRequest'
op|'('
nl|'\n'
string|"'2.4'"
op|')'
op|','
nl|'\n'
name|'None'
op|')'
op|','
nl|'\n'
name|'versioned_method'
op|'.'
name|'VersionedMethod'
op|'('
string|"'foo'"
op|','
nl|'\n'
name|'api_version'
op|'.'
name|'APIVersionRequest'
op|'('
nl|'\n'
string|"'2.3'"
op|')'
op|','
nl|'\n'
name|'api_version'
op|'.'
name|'APIVersionRequest'
op|'('
nl|'\n'
string|"'3.0'"
op|')'
op|','
nl|'\n'
name|'None'
op|')'
op|','
nl|'\n'
name|'versioned_method'
op|'.'
name|'VersionedMethod'
op|'('
string|"'foo'"
op|','
nl|'\n'
name|'api_version'
op|'.'
name|'APIVersionRequest'
op|'('
nl|'\n'
string|"'2.8'"
op|')'
op|','
nl|'\n'
name|'api_version'
op|'.'
name|'APIVersionRequest'
op|'('
nl|'\n'
string|"'2.9'"
op|')'
op|','
nl|'\n'
name|'None'
op|')'
op|','
nl|'\n'
op|']'
newline|'\n'
nl|'\n'
name|'result'
op|'='
name|'wsgi'
op|'.'
name|'Controller'
op|'.'
name|'check_for_versions_intersection'
op|'('
name|'func_list'
op|'='
nl|'\n'
name|'func_list'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'result'
op|')'
newline|'\n'
dedent|''
dedent|''
endmarker|''
end_unit
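Readability aid, not part of the tokenized unit above: the following is a plain-Python rendering of one of the tokenized test methods (test_pre_process_extensions_generator), reconstructed from its name/op/indent/dedent tokens. Whitespace is inferred, and the excerpt belongs to the test-case class in the token stream; wsgi and inspect refer to the imports of that original test module (assumed, since they appear before this excerpt).

def test_pre_process_extensions_generator(self):
    # Reconstruction of the tokenized method above; assumes the enclosing
    # test-case class plus `inspect` and the nova `wsgi` module imports.
    class Controller(object):
        def index(self, req, pants=None):
            return pants

    controller = Controller()
    resource = wsgi.Resource(controller)

    called = []

    def extension1(req):
        called.append('pre1')
        yield
        called.append('post1')

    def extension2(req):
        called.append('pre2')
        yield
        called.append('post2')

    extensions = [extension1, extension2]
    response, post = resource.pre_process_extensions(extensions, None, {})
    post = list(post)
    self.assertEqual(called, ['pre1', 'pre2'])
    self.assertIsNone(response)
    self.assertEqual(len(post), 2)
    self.assertTrue(inspect.isgenerator(post[0]))
    self.assertTrue(inspect.isgenerator(post[1]))

    for gen in post:
        try:
            gen.send(None)
        except StopIteration:
            continue

    self.assertEqual(called, ['pre1', 'pre2', 'post2', 'post1'])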
| 12.272518
| 205
| 0.59081
| 14,397
| 101,371
| 4.089463
| 0.029173
| 0.160813
| 0.106495
| 0.067871
| 0.941691
| 0.918184
| 0.892214
| 0.873684
| 0.848818
| 0.813387
| 0
| 0.005082
| 0.109084
| 101,371
| 8,259
| 206
| 12.274004
| 0.646828
| 0
| 0
| 0.961739
| 0
| 0.000484
| 0.336467
| 0.03503
| 0
| 0
| 0
| 0
| 0.016951
| 0
| null | null | 0.000363
| 0.001816
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1f60d19b19b575376bbede4de3531070e36dcfb6
| 1,305
|
py
|
Python
|
sound/app/models.py
|
shazlycode/sound-site
|
bee61f972eda0ec44b5a8466b0152b56015c307c
|
[
"bzip2-1.0.6"
] | null | null | null |
sound/app/models.py
|
shazlycode/sound-site
|
bee61f972eda0ec44b5a8466b0152b56015c307c
|
[
"bzip2-1.0.6"
] | null | null | null |
sound/app/models.py
|
shazlycode/sound-site
|
bee61f972eda0ec44b5a8466b0152b56015c307c
|
[
"bzip2-1.0.6"
] | null | null | null |
from django.db import models

# Create your models here.

class Baset(models.Model):
    player=models.CharField(max_length=100, default='')
    title= models.CharField(max_length=100, verbose_name="اسم السورة")
    # source=models.FileField(upload_to='sound',default='non.mp3' )
    path=models.CharField(max_length=100, default='')

    def __str__(self):
        return self.title

class Radio(models.Model):
    title= models.CharField(max_length=100)
    path= models.CharField(max_length=100)

    def __str__(self):
        return self.title

class Menshawy(models.Model):
    player=models.CharField(max_length=100, default='محمد صديق المنشاوي')
    title= models.CharField(max_length=100)
    path= models.CharField(max_length=100)

    def __str__(self):
        return self.title

class Husary(models.Model):
    player=models.CharField(max_length=100, default='محمود خليل الحصري')
    title= models.CharField(max_length=100)
    path= models.CharField(max_length=100)

    def __str__(self):
        return self.title

class Bana(models.Model):
    player=models.CharField(max_length=100, default='محمود علي البنا')
    title= models.CharField(max_length=100)
    path= models.CharField(max_length=100)

    def __str__(self):
        return self.title
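Illustrative aside, not part of the dataset row: a minimal sketch of how the Baset model defined above might be used from a Django shell. The import path app.models, the field values, and the audio file path are assumptions based on this row's repo path (sound/app/models.py); the app would need to be installed and migrated.

# Hypothetical usage of the Baset model above (assumes the 'app' package from
# sound/app/models.py is in INSTALLED_APPS and migrations have been applied).
from app.models import Baset

# Create a record; the path points at a hypothetical audio file.
Baset.objects.create(player='some reciter', title='Al-Fatiha', path='sound/001.mp3')

# __str__ returns the title, so printing a queryset hit shows the surah name.
print(Baset.objects.filter(title__contains='Fatiha').first())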
| 29.659091
| 73
| 0.694253
| 170
| 1,305
| 5.117647
| 0.270588
| 0.241379
| 0.289655
| 0.386207
| 0.782759
| 0.782759
| 0.702299
| 0.667816
| 0.667816
| 0.550575
| 0
| 0.04072
| 0.190805
| 1,305
| 44
| 74
| 29.659091
| 0.783144
| 0.0659
| 0
| 0.6
| 0
| 0
| 0.049302
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.033333
| 0.166667
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
1f66c70fe1eb52872a8a9a3085a80d560e91e639
| 3,410
|
py
|
Python
|
BreastCancerController.py
|
iliaskaras/ML-BreastCancer
|
5e2bae2d6fd5e76d4164f8be58f9e2951d053d3c
|
[
"Apache-2.0"
] | null | null | null |
BreastCancerController.py
|
iliaskaras/ML-BreastCancer
|
5e2bae2d6fd5e76d4164f8be58f9e2951d053d3c
|
[
"Apache-2.0"
] | null | null | null |
BreastCancerController.py
|
iliaskaras/ML-BreastCancer
|
5e2bae2d6fd5e76d4164f8be58f9e2951d053d3c
|
[
"Apache-2.0"
] | 1
|
2020-08-31T22:20:46.000Z
|
2020-08-31T22:20:46.000Z
|
from BreastCancerDecisionTreeClf import BreastCancerDecisionTreeClf
from GraphvizCreator import GraphvizCreator
#call maxDepth = 20 ================================================
breastCancerDecisionTree = BreastCancerDecisionTreeClf()
breastCancerDecisionTree.createDecisionTreeClassifier("gini",20)
graphvizCreator = GraphvizCreator(breastCancerDecisionTree)
breastCancerDecisionTree.getCrossValidation()
graphvizCreator.exportGraph()
breastCancerDecisionTree = None
breastCancerDecisionTree = BreastCancerDecisionTreeClf()
breastCancerDecisionTree.createDecisionTreeClassifier("entropy",20)
breastCancerDecisionTree.getCrossValidation()
graphvizCreator.changeDecisionTreeClfObject(breastCancerDecisionTree)
graphvizCreator.exportGraph()
#call maxDepth = 12 ================================================
breastCancerDecisionTree = None
breastCancerDecisionTree = BreastCancerDecisionTreeClf()
breastCancerDecisionTree.createDecisionTreeClassifier("gini",12)
graphvizCreator = GraphvizCreator(breastCancerDecisionTree)
breastCancerDecisionTree.getCrossValidation()
graphvizCreator.exportGraph()
breastCancerDecisionTree = None
breastCancerDecisionTree = BreastCancerDecisionTreeClf()
breastCancerDecisionTree.createDecisionTreeClassifier("entropy",12)
breastCancerDecisionTree.getCrossValidation()
graphvizCreator.changeDecisionTreeClfObject(breastCancerDecisionTree)
graphvizCreator.exportGraph()
#call maxDepth = 8 ================================================
breastCancerDecisionTree = None
breastCancerDecisionTree = BreastCancerDecisionTreeClf()
breastCancerDecisionTree.createDecisionTreeClassifier("gini",8)
graphvizCreator = GraphvizCreator(breastCancerDecisionTree)
breastCancerDecisionTree.getCrossValidation()
graphvizCreator.exportGraph()
breastCancerDecisionTree = None
breastCancerDecisionTree = BreastCancerDecisionTreeClf()
breastCancerDecisionTree.createDecisionTreeClassifier("entropy",8)
breastCancerDecisionTree.getCrossValidation()
graphvizCreator.changeDecisionTreeClfObject(breastCancerDecisionTree)
graphvizCreator.exportGraph()
#call maxDepth = 5 ================================================
breastCancerDecisionTree = None
breastCancerDecisionTree = BreastCancerDecisionTreeClf()
breastCancerDecisionTree.createDecisionTreeClassifier("gini",5)
graphvizCreator = GraphvizCreator(breastCancerDecisionTree)
breastCancerDecisionTree.getCrossValidation()
graphvizCreator.exportGraph()
breastCancerDecisionTree = None
breastCancerDecisionTree = BreastCancerDecisionTreeClf()
breastCancerDecisionTree.createDecisionTreeClassifier("entropy",5)
breastCancerDecisionTree.getCrossValidation()
graphvizCreator.changeDecisionTreeClfObject(breastCancerDecisionTree)
graphvizCreator.exportGraph()
#call maxDepth = 2 ================================================
breastCancerDecisionTree = None
breastCancerDecisionTree = BreastCancerDecisionTreeClf()
breastCancerDecisionTree.createDecisionTreeClassifier("gini",2)
graphvizCreator = GraphvizCreator(breastCancerDecisionTree)
breastCancerDecisionTree.getCrossValidation()
graphvizCreator.exportGraph()
breastCancerDecisionTree = None
breastCancerDecisionTree = BreastCancerDecisionTreeClf()
breastCancerDecisionTree.createDecisionTreeClassifier("entropy",2)
breastCancerDecisionTree.getCrossValidation()
graphvizCreator.changeDecisionTreeClfObject(breastCancerDecisionTree)
graphvizCreator.exportGraph()
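# The repeated blocks above could be collapsed into a parameter sweep; a hedged
# sketch using the same project classes (shown as a comment so the script's
# behaviour is unchanged -- it re-creates GraphvizCreator each pass instead of
# calling changeDecisionTreeClfObject):
#
#     for max_depth in (20, 12, 8, 5, 2):
#         for criterion in ("gini", "entropy"):
#             clf = BreastCancerDecisionTreeClf()
#             clf.createDecisionTreeClassifier(criterion, max_depth)
#             clf.getCrossValidation()
#             GraphvizCreator(clf).exportGraph()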
| 35.520833
| 69
| 0.821701
| 171
| 3,410
| 16.385965
| 0.111111
| 0.182013
| 0.267666
| 0.367595
| 0.951106
| 0.912919
| 0.912919
| 0.672377
| 0.672377
| 0.463954
| 0
| 0.006479
| 0.04956
| 3,410
| 95
| 70
| 35.894737
| 0.858069
| 0.097361
| 0
| 0.803279
| 0
| 0
| 0.017909
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.032787
| 0
| 0.032787
| 0
| 0
| 0
| 1
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1f8a2b23b8cc9f7f8169fa07ab0b9093651d3e36
| 1,897
|
py
|
Python
|
tests/test_unit_sh.py
|
IBM/python-itoolk
|
36054a7ebdd8f5556c548d4c315e00e3c8d04904
|
[
"MIT"
] | 11
|
2019-01-09T12:31:04.000Z
|
2021-08-29T05:26:35.000Z
|
tests/test_unit_sh.py
|
jkyeung/python-itoolkit
|
ca11d532220bb0fff7afca540b46bd179b6f2b2d
|
[
"MIT"
] | 50
|
2018-12-21T18:52:25.000Z
|
2021-05-25T13:38:15.000Z
|
tests/test_unit_sh.py
|
jkyeung/python-itoolkit
|
ca11d532220bb0fff7afca540b46bd179b6f2b2d
|
[
"MIT"
] | 9
|
2018-12-25T00:02:19.000Z
|
2022-02-22T00:58:13.000Z
|
import xml.etree.ElementTree as ET
from itoolkit import iSh
def test_sh():
cmd = 'ls -l'
key = 'lljqezl'
element = ET.fromstring(iSh(key, cmd).xml_in())
assert(element.tag == 'sh')
assert('error' in element.attrib)
assert(element.attrib['error'] == 'fast')
assert('var' in element.attrib)
assert(element.attrib['var'] == key)
assert(element.text == cmd)
def test_sh_error_on():
cmd = 'ls -l'
key = 'rtoiu1nqew'
error = 'on'
element = ET.fromstring(iSh(key, cmd, {'error': error}).xml_in())
assert(element.tag == 'sh')
assert('error' in element.attrib)
assert(element.attrib['error'] == error)
assert('var' in element.attrib)
assert(element.attrib['var'] == key)
assert(element.text == cmd)
def test_sh_error_off():
cmd = 'ls -l'
key = 'lkjwernm'
error = 'off'
element = ET.fromstring(iSh(key, cmd, {'error': error}).xml_in())
assert(element.tag == 'sh')
assert('error' in element.attrib)
assert(element.attrib['error'] == error)
assert('var' in element.attrib)
assert(element.attrib['var'] == key)
assert(element.text == cmd)
def test_sh_row_on():
cmd = 'ls -l'
key = 'rtoiu1nqew'
row = 'on'
element = ET.fromstring(iSh(key, cmd, {'row': row}).xml_in())
assert(element.tag == 'sh')
assert('row' in element.attrib)
assert(element.attrib['row'] == row)
assert('var' in element.attrib)
assert(element.attrib['var'] == key)
assert(element.text == cmd)
def test_sh_row_off():
cmd = 'ls -l'
key = 'lkjwernm'
row = 'off'
element = ET.fromstring(iSh(key, cmd, {'row': row}).xml_in())
assert(element.tag == 'sh')
assert('row' in element.attrib)
assert(element.attrib['row'] == row)
assert('var' in element.attrib)
assert(element.attrib['var'] == key)
assert(element.text == cmd)
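# The four option tests above differ only in which control key they pass to
# iSh; a consolidated sketch using pytest.mark.parametrize is shown below.
# (Not part of the original suite; the test name and 'anykey1' value are made up.)
import pytest

@pytest.mark.parametrize('ctl_key,ctl_val', [
    ('error', 'on'), ('error', 'off'), ('row', 'on'), ('row', 'off'),
])
def test_sh_option(ctl_key, ctl_val):
    cmd = 'ls -l'
    key = 'anykey1'
    element = ET.fromstring(iSh(key, cmd, {ctl_key: ctl_val}).xml_in())
    assert element.tag == 'sh'
    assert ctl_key in element.attrib
    assert element.attrib[ctl_key] == ctl_val
    assert element.attrib['var'] == key
    assert element.text == cmd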
| 21.556818
| 69
| 0.604639
| 254
| 1,897
| 4.444882
| 0.125984
| 0.230292
| 0.132861
| 0.186005
| 0.915855
| 0.915855
| 0.818423
| 0.809566
| 0.809566
| 0.809566
| 0
| 0.001345
| 0.216131
| 1,897
| 87
| 70
| 21.804598
| 0.757902
| 0
| 0
| 0.75
| 0
| 0
| 0.094887
| 0
| 0
| 0
| 0
| 0
| 0.535714
| 1
| 0.089286
| false
| 0
| 0.035714
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
1f91221f26945c05a44c475bc67642c44db2b396
| 87
|
py
|
Python
|
test_file.py
|
atulrvaghela/test_repository
|
eea3605c3c88a102b46e7235636aedeaf555d6a1
|
[
"MIT"
] | null | null | null |
test_file.py
|
atulrvaghela/test_repository
|
eea3605c3c88a102b46e7235636aedeaf555d6a1
|
[
"MIT"
] | null | null | null |
test_file.py
|
atulrvaghela/test_repository
|
eea3605c3c88a102b46e7235636aedeaf555d6a1
|
[
"MIT"
] | null | null | null |
print("This is test file for git repository...")
print("this is edited text test...")
| 21.75
| 48
| 0.689655
| 14
| 87
| 4.285714
| 0.714286
| 0.3
| 0.366667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.149425
| 87
| 3
| 49
| 29
| 0.810811
| 0
| 0
| 0
| 0
| 0
| 0.758621
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
1f91eef6de5524bd60144c13025939317f691624
| 93
|
py
|
Python
|
src/1-prepare/cmftools/nb/__init__.py
|
DirkEilander/compound_hotspots
|
f9d7960633be80e8e24d2f2563df367cc3f060c6
|
[
"BSD-3-Clause"
] | 1
|
2022-01-17T07:02:13.000Z
|
2022-01-17T07:02:13.000Z
|
src/1-prepare/cmftools/nb/__init__.py
|
DirkEilander/compound_hotspots
|
f9d7960633be80e8e24d2f2563df367cc3f060c6
|
[
"BSD-3-Clause"
] | null | null | null |
src/1-prepare/cmftools/nb/__init__.py
|
DirkEilander/compound_hotspots
|
f9d7960633be80e8e24d2f2563df367cc3f060c6
|
[
"BSD-3-Clause"
] | 1
|
2022-01-17T02:48:28.000Z
|
2022-01-17T02:48:28.000Z
|
from .nb_io import *
from .dd_ops import *
from .nb_ops import *
from .dd_funcs import *
| 18.6
| 24
| 0.698925
| 16
| 93
| 3.8125
| 0.4375
| 0.491803
| 0.393443
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.215054
| 93
| 4
| 25
| 23.25
| 0.835616
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2f0964a06af37ca793a19c01c4bd42e1f6cbc31d
| 47
|
py
|
Python
|
math_utils.py
|
mfatihaktas/service-capacity
|
5480f6c1a95fdf6e9b49f40be1e5196951982223
|
[
"MIT"
] | null | null | null |
math_utils.py
|
mfatihaktas/service-capacity
|
5480f6c1a95fdf6e9b49f40be1e5196951982223
|
[
"MIT"
] | null | null | null |
math_utils.py
|
mfatihaktas/service-capacity
|
5480f6c1a95fdf6e9b49f40be1e5196951982223
|
[
"MIT"
] | null | null | null |
import math
def mlog(x):
return math.log(x)
| 9.4
| 20
| 0.680851
| 9
| 47
| 3.555556
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.191489
| 47
| 4
| 21
| 11.75
| 0.842105
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
c85832cad87a8b8a0946ea0514008c0e7189a457
| 160
|
py
|
Python
|
Mundo 1/Desafio 1.py
|
lucashsbarros/Python_Curso
|
01f7b5c89636cacc774d38b39951ac2b73272c63
|
[
"MIT"
] | null | null | null |
Mundo 1/Desafio 1.py
|
lucashsbarros/Python_Curso
|
01f7b5c89636cacc774d38b39951ac2b73272c63
|
[
"MIT"
] | null | null | null |
Mundo 1/Desafio 1.py
|
lucashsbarros/Python_Curso
|
01f7b5c89636cacc774d38b39951ac2b73272c63
|
[
"MIT"
] | null | null | null |
nome = input ('Qual o seu nome?')
print ('Olá',nome,'! Que bom que apareceu')
nome = input ('Qual o seu nome?')
print ('Olá',nome,'!' 'Que bom que apareceu')
| 22.857143
| 45
| 0.625
| 26
| 160
| 3.846154
| 0.384615
| 0.18
| 0.26
| 0.28
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0.175
| 160
| 6
| 46
| 26.666667
| 0.757576
| 0
| 0
| 0.5
| 0
| 0
| 0.512658
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 11
|
c0d8ea31732f21e74b02c56bd49f1fcaad9b8772
| 5,320
|
py
|
Python
|
movements.py
|
Llana13/Supermarket
|
8ecdf2e55e774ad30d21cb2a83b2bb0af005128e
|
[
"MIT"
] | 1
|
2020-04-20T10:45:58.000Z
|
2020-04-20T10:45:58.000Z
|
movements.py
|
Llana13/Supermarket
|
8ecdf2e55e774ad30d21cb2a83b2bb0af005128e
|
[
"MIT"
] | null | null | null |
movements.py
|
Llana13/Supermarket
|
8ecdf2e55e774ad30d21cb2a83b2bb0af005128e
|
[
"MIT"
] | null | null | null |
import cv2
from classes import Customer
def move_to(customer, target_y, target_x):
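    """Step the customer toward (target_y, target_x) one axis at a time.

    The active velocity component is set to +/-10 px per frame, the customer
    is redrawn and moved every frame, and pressing 'q' in the cv2 window
    stops the current leg early.
    """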
# up
    if target_y < customer.y and target_x == customer.x:
        while customer.y != target_y:
customer.vy = -10
customer.vx = 0
customer.draw()
customer.move()
if cv2.waitKey(25) == ord('q'):
break
# down
    elif target_y > customer.y and target_x == customer.x:
while customer.y < target_y:
customer.vy = 10
customer.vx= 0
customer.draw()
customer.move()
if cv2.waitKey(25) == ord('q'):
break
# left
    elif target_x < customer.x and target_y == customer.y:
while customer.x > target_x:
customer.vx = -10
customer.vy = 0
customer.draw()
customer.move()
if cv2.waitKey(25) == ord('q'):
break
# right
else:
while customer.x < target_x:
customer.vx = 10
customer.vy = 0
customer.draw()
customer.move()
if cv2.waitKey(25) == ord('q'):
break
# Functions naming: Start_Destination
def entrance_fruit(start_coord,vv):
customer = Customer(start_coord, vv)
move_to(customer, 250, customer.x) # e to fu
def entrance_spices(start_coord,vv):
customer = Customer(start_coord, vv)
move_to(customer, 60, customer.x) # e to fu
move_to(customer, customer.y, 550) # fu to su
move_to(customer, 250, customer.x) # su to s
def entrance_dairy(start_coord,vv):
customer = Customer(start_coord, vv)
move_to(customer, 60, customer.x) # e to fu
    move_to(customer, customer.y, 315) # fu to du
move_to(customer, 250, customer.x) # du to d
def entrance_drinks(start_coord,vv):
customer = Customer(start_coord, vv)
move_to(customer, 60, customer.x) # e to fu
move_to(customer, customer.y, 85) # fu to ru
move_to(customer, 250, customer.x) # ru to r
def fruit_spices(start_coord,vv):
customer = Customer(start_coord, vv)
move_to(customer, 60, customer.x) # f to fu
move_to(customer, customer.y, 550) # fu to su
move_to(customer, 250, customer.x) # su to s
def fruit_dairy(start_coord,vv):
customer = Customer(start_coord, vv)
move_to(customer, 60, customer.x) # f to fu
    move_to(customer, customer.y, 315) # fu to du
move_to(customer, 250, customer.x) # du to d
def fruit_drinks(start_coord,vv):
customer = Customer(start_coord, vv)
move_to(customer, 60, customer.x) # f to fu
    move_to(customer, customer.y, 85) # fu to ru
move_to(customer, 250, customer.x) # ru to r
def spices_fruit(start_coord,vv):
customer = Customer(start_coord, vv)
move_to(customer, 60, customer.x)# s to su
move_to(customer, customer.y, 770)# su to fu
move_to(customer, 250, customer.x)# fu to f
def spices_dairy(start_coord,vv):
customer = Customer(start_coord, vv)
move_to(customer, 60, customer.x)# s to su
move_to(customer, customer.y, 315)# su to du
move_to(customer, 250, customer.x)# du to d
def spices_drinks(start_coord,vv):
customer = Customer(start_coord, vv)
move_to(customer, 60, customer.x)# s to su
    move_to(customer, customer.y, 85) # su to ru
move_to(customer, 250, customer.x) # ru to r
def dairy_fruit(start_coord,vv):
customer = Customer(start_coord, vv)
move_to(customer, 60, customer.x) # d to du
    move_to(customer, customer.y, 770) # du to fu
move_to(customer, 250, customer.x)# fu to f
def dairy_spices(start_coord,vv):
customer = Customer(start_coord, vv)
move_to(customer, 60, customer.x) # d to du
    move_to(customer, customer.y, 550) # du to su
move_to(customer, 250, customer.x) # su to s
def dairy_drinks(start_coord,vv):
customer = Customer(start_coord, vv)
move_to(customer, 60, customer.x) # d to du
move_to(customer, customer.y, 85) # du to ru
move_to(customer, 250, customer.x) # ru to
def drinks_fruit(start_coord,vv):
customer = Customer(start_coord, vv)
move_to(customer, 60, customer.x) # r to ru
move_to(customer, customer.y, 770)# ru to fu
move_to(customer, 250, customer.x)# fu to
def drinks_spices(start_coord,vv):
customer = Customer(start_coord, vv)
move_to(customer, 60, customer.x) # r to ru
move_to(customer, customer.y, 550) # ru to su
move_to(customer, 250, customer.x) # su to s
def drinks_dairy(start_coord,vv):
customer = Customer(start_coord, vv)
move_to(customer, 60, customer.x) # r to ru
move_to(customer, customer.y, 315) # ru to du
move_to(customer, 250, customer.x) # du to d
def to_checkout(start_coord,vv):
customer = Customer(start_coord, vv)
if customer.x == 770: # fruit
move_to(customer, 450, customer.x)
move_to(customer, customer.y, 500)
move_to(customer, 600, customer.x)
elif customer.x == 550: # spices
move_to(customer, 450, customer.x)
move_to(customer, customer.y, 355)
move_to(customer, 600, customer.x)
elif customer.x == 315: #dairy
move_to(customer, 450, customer.x)
move_to(customer, customer.y, 218)
move_to(customer, 600, customer.x)
else: # drinks
move_to(customer, 600, customer.x)
| 33.459119
| 58
| 0.639662
| 821
| 5,320
| 3.997564
| 0.077954
| 0.104205
| 0.243144
| 0.120658
| 0.892139
| 0.890615
| 0.868373
| 0.868373
| 0.855576
| 0.83181
| 0
| 0.045739
| 0.247932
| 5,320
| 158
| 59
| 33.670886
| 0.774556
| 0.086654
| 0
| 0.705426
| 0
| 0
| 0.000833
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.139535
| false
| 0
| 0.015504
| 0
| 0.155039
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
23be363209fa860af54f5693dcd2010b0fdd931f
| 95,001
|
py
|
Python
|
ManufacturingNet/models/svm.py
|
lalitjg/ManufacturingNet
|
5a734e90c77ac0757499353fa3e82bcffe12d1f0
|
[
"MIT"
] | 9
|
2020-12-07T21:04:44.000Z
|
2021-11-23T14:43:28.000Z
|
ManufacturingNet/models/svm.py
|
BaratiLab/ManufacturingNet
|
5a734e90c77ac0757499353fa3e82bcffe12d1f0
|
[
"MIT"
] | null | null | null |
ManufacturingNet/models/svm.py
|
BaratiLab/ManufacturingNet
|
5a734e90c77ac0757499353fa3e82bcffe12d1f0
|
[
"MIT"
] | 2
|
2021-01-15T18:38:21.000Z
|
2021-09-18T01:50:23.000Z
|
"""SVM can train SVC, NuSVC, LinearSVC, SVR, NuSVR, and LinearSVR
models implemented by Scikit-Learn on the given dataset. Before
training, the user is prompted for parameter input. After training,
model metrics are displayed, and the user can make new predictions.
Classification and regression are both supported.
View the documentation at https://manufacturingnet.readthedocs.io/.
"""
from math import sqrt
import matplotlib.pyplot as plt
from sklearn.metrics import (accuracy_score, confusion_matrix, make_scorer,
mean_squared_error, roc_auc_score, roc_curve)
from sklearn.model_selection import (GridSearchCV, cross_val_score,
train_test_split)
from sklearn.svm import SVC, SVR, LinearSVC, LinearSVR, NuSVC, NuSVR
class SVM:
"""Class model for support vector machine (SVM) models."""
def __init__(self, attributes=None, labels=None):
"""Initializes a SVM object."""
self.attributes = attributes
self.labels = labels
self.test_size = None
self.cv = None
self.graph_results = None
self.fpr = None
self.tpr = None
self.bin = False
self.gridsearch = False
self.gs_params = None
self.gs_result = None
self.classifier_SVC = None
self.accuracy_SVC = None
self.roc_auc_SVC = None
self.confusion_matrix_SVC = None
self.cross_val_scores_SVC = None
self.classifier_nu_SVC = None
self.accuracy_nu_SVC = None
self.roc_auc_nu_SVC = None
self.confusion_matrix_nu_SVC = None
self.cross_val_scores_nu_SVC = None
self.classifier_linear_SVC = None
self.accuracy_linear_SVC = None
self.cross_val_scores_linear_SVC = None
self.regressor_SVR = None
self.mean_squared_error_SVR = None
self.r2_score_SVR = None
self.r_score_SVR = None
self.cross_val_scores_SVR = None
self.regressor_nu_SVR = None
self.mean_squared_error_nu_SVR = None
self.r2_score_nu_SVR = None
self.r_score_nu_SVR = None
self.cross_val_scores_nu_SVR = None
self.regressor_linear_SVR = None
self.mean_squared_error_linear_SVR = None
self.r2_score_linear_SVR = None
self.r_score_linear_SVR = None
self.cross_val_scores_linear_SVR = None
# References to training and testing subsets of dataset
# For re-use purposes
self.dataset_X_train = None
self.dataset_y_train = None
self.dataset_X_test = None
self.dataset_y_test = None
# Accessor Methods
def get_attributes(self):
"""Accessor method for attributes."""
return self.attributes
def get_labels(self):
"""Accessor method for labels."""
return self.labels
def get_classifier_SVC(self):
"""Accessor method for classifier_SVC."""
return self.classifier_SVC
def get_accuracy_SVC(self):
"""Accessor method for accuracy_SVC."""
return self.accuracy_SVC
def get_roc_auc_SVC(self):
"""Accessor method for roc_auc_SVC."""
return self.roc_auc_SVC
def get_confusion_matrix_SVC(self):
"""Accessor method for confusion_matrix_SVC."""
return self.confusion_matrix_SVC
def get_cross_val_scores_SVC(self):
"""Accessor method for cross_val_scores_SVC."""
return self.cross_val_scores_SVC
def get_classifier_nu_SVC(self):
"""Accessor method for classifier_nu_SVC."""
return self.classifier_nu_SVC
def get_accuracy_nu_SVC(self):
"""Accessor method for accuracy_nu_SVC."""
return self.accuracy_nu_SVC
def get_roc_auc_nu_SVC(self):
"""Accessor method for roc_auc_nu_SVC."""
return self.roc_auc_nu_SVC
def get_confusion_matrix_nu_SVC(self):
"""Accessor method for confusion_matrix_nu_SVC."""
return self.confusion_matrix_nu_SVC
def get_cross_val_scores_nu_SVC(self):
"""Accessor method for cross_val_scores_nu_SVC."""
return self.cross_val_scores_nu_SVC
def get_classifier_linear_SVC(self):
"""Accessor method for classifier_linear_SVC."""
return self.classifier_linear_SVC
def get_accuracy_linear_SVC(self):
"""Accessor method for accuracy_linear_SVC."""
return self.accuracy_linear_SVC
def get_cross_val_scores_linear_SVC(self):
"""Accessor method for cross_val_scores_linear_SVC."""
return self.cross_val_scores_linear_SVC
def get_regressor_SVR(self):
"""Accessor method for regressor_SVR."""
return self.regressor_SVR
def get_mean_squared_error_SVR(self):
"""Accessor method for mean_squared_error_SVR."""
return self.mean_squared_error_SVR
def get_r2_score_SVR(self):
"""Accessor method for r2_score_SVR."""
return self.r2_score_SVR
def get_r_score_SVR(self):
"""Accessor method for r_score_SVR."""
return self.r_score_SVR
def get_cross_val_scores_SVR(self):
"""Accessor method for cross_val_scores_SVR."""
return self.cross_val_scores_SVR
def get_regressor_nu_SVR(self):
"""Accessor method for regressor_nu_SVR."""
return self.regressor_nu_SVR
def get_mean_squared_error_nu_SVR(self):
"""Accessor method for mean_squared_error_nu_SVR."""
return self.mean_squared_error_nu_SVR
def get_r2_score_nu_SVR(self):
"""Accessor method for r2_score_nu_SVR."""
return self.r2_score_nu_SVR
def get_r_score_nu_SVR(self):
"""Accessor method for r_score_nu_SVR."""
return self.r_score_nu_SVR
def get_cross_val_scores_nu_SVR(self):
"""Accessor method for cross_val_scores_nu_SVR."""
return self.cross_val_scores_nu_SVR
def get_regressor_linear_SVR(self):
"""Accessor method for regressor_linear_SVR."""
return self.regressor_linear_SVR
def get_mean_squared_error_linear_SVR(self):
"""Accessor method for mean_squared_error_linear_SVR."""
return self.mean_squared_error_linear_SVR
def get_r2_score_linear_SVR(self):
"""Accessor method for r2_score_linear_SVR."""
return self.r2_score_linear_SVR
def get_r_score_linear_SVR(self):
"""Accessor method for r_score_linear_SVR."""
return self.r_score_linear_SVR
def get_cross_val_scores_linear_SVR(self):
"""Accessor method for cross_val_scores_linear_SVR."""
return self.cross_val_scores_linear_SVR
# Modifier Methods
def set_attributes(self, new_attributes=None):
"""Modifier method for attributes."""
self.attributes = new_attributes
def set_labels(self, new_labels=None):
"""Modifier method for labels."""
self.labels = new_labels
# Wrappers for SVM classification classes
def run_SVC(self):
"""Runs SVC model."""
if self._check_inputs():
# Initialize classifier
self.classifier_SVC = self._create_SVC_model(is_nu=False)
# Split data, if needed
# If testing/training sets are still None, call _split_data()
if self.dataset_X_test is None:
self._split_data()
# Train classifier
# Handle exception if arguments are incorrect
try:
self.classifier_SVC.fit(self.dataset_X_train,
self.dataset_y_train)
except Exception as e:
print("An exception occurred while training the SVC model.",
"Check your arguments and try again.")
print("Here is the exception message:")
print(e)
self.classifier_SVC = None
return
# Metrics
self.accuracy_SVC = self.classifier_SVC.score(self.dataset_X_test,
self.dataset_y_test)
y_prediction = self.classifier_SVC.predict(self.dataset_X_test)
probas = self.classifier_SVC.predict_proba(self.dataset_X_test)
# If classification is binary, calculate roc_auc
if probas.shape[1] == 2:
self.bin = True
self.roc_auc_SVC = roc_auc_score(y_prediction, probas[::, 1])
self.fpr, self.tpr, _ = roc_curve(self.dataset_y_test,
probas[::, 1])
# Else, calculate confusion matrix
else:
self.confusion_matrix_SVC = confusion_matrix(self.dataset_y_test,
y_prediction)
self.cross_val_scores_SVC = \
cross_val_score(self.classifier_SVC, self.attributes,
self.labels, cv=self.cv)
# Output results
self._output_classifier_results(model="SVC")
def predict_SVC(self, dataset_X=None):
"""Classifies each datapoint in dataset_X using the SVC model.
Returns the predicted classifications.
"""
# Check that run_SVC() has already been called
if self.classifier_SVC is None:
print("The SVC model seems to be missing. Have you called",
"run_SVC() yet?")
return None
# Try to make the prediction
# Handle exception if dataset_X isn't a valid input
try:
y_prediction = self.classifier_SVC.predict(dataset_X)
except Exception as e:
print("The SVC model failed to run.",
"Check your inputs and try again.")
print("Here is the exception message:")
print(e)
return None
print("\nSVC Predictions:\n", y_prediction, "\n")
return y_prediction
def run_nu_SVC(self):
"""Runs NuSVC model."""
if self._check_inputs():
# Initialize classifier
self.classifier_nu_SVC = self._create_SVC_model(is_nu=True)
# Split data, if needed
# If testing/training sets are still None, call _split_data()
if self.dataset_X_test is None:
self._split_data()
# Train classifier
# Handle exception if arguments are incorrect
try:
self.classifier_nu_SVC.fit(self.dataset_X_train,
self.dataset_y_train)
except Exception as e:
print("An exception occurred while training the NuSVC model.",
"Check your arguments and try again.")
print("Here is the exception message:")
print(e)
self.classifier_nu_SVC = None
return
# Metrics
self.accuracy_nu_SVC =\
self.classifier_nu_SVC.score(self.dataset_X_test,
self.dataset_y_test)
y_prediction = self.classifier_nu_SVC.predict(self.dataset_X_test)
probas = self.classifier_nu_SVC.predict_proba(self.dataset_X_test)
# If classification is binary, calculate roc_auc
if probas.shape[1] == 2:
self.bin = True
self.roc_auc_nu_SVC = roc_auc_score(
y_prediction, probas[::, 1])
self.fpr, self.tpr, _ = \
roc_curve(self.dataset_y_test, probas[::, 1])
# Else, calculate confusion matrix
else:
self.confusion_matrix_nu_SVC = \
confusion_matrix(self.dataset_y_test, y_prediction)
self.cross_val_scores_nu_SVC = \
cross_val_score(self.classifier_nu_SVC, self.attributes,
self.labels, cv=self.cv)
# Output results
self._output_classifier_results(model="NuSVC")
def predict_nu_SVC(self, dataset_X=None):
"""Classifies each datapoint in dataset_X using the NuSVC model.
Returns the predicted classifications.
"""
# Check that run_nu_SVC() has already been called
if self.classifier_nu_SVC is None:
print("The NuSVC model seems to be missing.",
"Have you called run_nu_SVC() yet?")
return None
# Try to make the prediction
# Handle exception if dataset_X isn't a valid input
try:
y_prediction = self.classifier_nu_SVC.predict(dataset_X)
except Exception as e:
print("The NuSVC model failed to run.",
"Check your inputs and try again.")
print("Here is the exception message:")
print(e)
return None
print("\nNuSVC Predictions:\n", y_prediction, "\n")
return y_prediction
def run_linear_SVC(self):
"""Runs LinearSVC model."""
if self._check_inputs():
# Initialize classifier
self.classifier_linear_SVC = self._create_linear_SVC_model()
# Split data, if needed
# If testing/training sets are still None, call _split_data()
if self.dataset_X_test is None:
self._split_data()
# Train classifier
# Handle exception if arguments are incorrect
try:
self.classifier_linear_SVC.fit(self.dataset_X_train,
self.dataset_y_train)
except Exception as e:
print("An exception occurred while training the LinearSVC",
"model. Check your arguments and try again.")
print("Here is the exception message:")
print(e)
self.classifier_linear_SVC = None
return
# Metrics
self.accuracy_linear_SVC = \
self.classifier_linear_SVC.score(self.dataset_X_test,
self.dataset_y_test)
self.cross_val_scores_linear_SVC =\
cross_val_score(self.classifier_linear_SVC, self.attributes,
self.labels, cv=self.cv)
# Output results
self._output_classifier_results(model="LinearSVC")
def predict_linear_SVC(self, dataset_X=None):
"""Classifies each datapoint in dataset_X using the LinearSVC
model. Returns the predicted classifications.
"""
# Check that run_linear_SVC() has already been called
if self.classifier_linear_SVC is None:
print("The LinearSVC model seems to be missing.",
"Have you called run_linear_SVC() yet?")
return None
# Try to make the prediction
# Handle exception if dataset_X isn't a valid input
try:
y_prediction = self.classifier_linear_SVC.predict(dataset_X)
except Exception as e:
print("The LinearSVC model failed to run.",
"Check your inputs and try again.")
print("Here is the exception message:")
print(e)
return None
print("\nLinearSVC Predictions:\n", y_prediction, "\n")
return y_prediction
# Wrappers for SVM regression classes
def run_SVR(self):
"""Runs SVR model."""
if self._check_inputs():
# Initialize regression model
self.regressor_SVR = self._create_SVR_model(is_nu=False)
# Split data, if needed
# If testing/training sets are still None, call _split_data()
if self.dataset_X_test is None:
self._split_data()
# Train regression model
# Handle exception if arguments are incorrect and/or if labels isn't
# quantitative data
try:
self.regressor_SVR.fit(self.dataset_X_train,
self.dataset_y_train)
except Exception as e:
print("An exception occurred while training the SVR model.",
"Check you arguments and try again.")
print("Does labels only contain quantitative data?")
print("Here is the exception message:")
print(e)
self.regressor_SVR = None
return
# Evaluate metrics of model
y_prediction = self.regressor_SVR.predict(self.dataset_X_test)
self.mean_squared_error_SVR = \
mean_squared_error(self.dataset_y_test, y_prediction)
self.r2_score_SVR = self.regressor_SVR.score(self.dataset_X_test,
self.dataset_y_test)
if self.r2_score_SVR >= 0:
self.r_score_SVR = sqrt(self.r2_score_SVR)
self.cross_val_scores_SVR = \
cross_val_score(self.regressor_SVR, self.attributes,
self.labels, cv=self.cv)
# Output results
self._output_regressor_results(model="SVR")
def predict_SVR(self, dataset_X=None):
"""Predicts the output of each datapoint in dataset_X using the
SVR model. Returns the predictions.
"""
# Check that run_SVR() has already been called
if self.regressor_SVR is None:
print("The SVR model seems to be missing.",
"Have you called run_SVR() yet?")
return None
# Try to make the prediction
# Handle exception if dataset_X isn't a valid input
try:
y_prediction = self.regressor_SVR.predict(dataset_X)
except Exception as e:
print("The SVR model failed to run.",
"Check your inputs and try again.")
print("Here is the exception message:")
print(e)
return None
print("\nSVR Predictions:\n", y_prediction, "\n")
return y_prediction
def run_nu_SVR(self):
"""Runs NuSVR model."""
if self._check_inputs():
# Initialize regression model
self.regressor_nu_SVR = self._create_SVR_model(is_nu=True)
# Split data, if needed
# If testing/training sets are still None, call _split_data()
if self.dataset_X_test is None:
self._split_data()
# Train regression model
# Handle exception if arguments are incorrect and/or if labels isn't
# quantitative data
try:
self.regressor_nu_SVR.fit(self.dataset_X_train,
self.dataset_y_train)
except Exception as e:
print("An exception occurred while training the NuSVR model.",
"Check you arguments and try again.")
print("Does labels only contain quantitative data?")
print("Here is the exception message:")
print(e)
self.regressor_nu_SVR = None
return
# Metrics
y_prediction = self.regressor_nu_SVR.predict(self.dataset_X_test)
self.mean_squared_error_nu_SVR = \
mean_squared_error(self.dataset_y_test, y_prediction)
self.r2_score_nu_SVR = \
self.regressor_nu_SVR.score(self.dataset_X_test,
self.dataset_y_test)
if self.r2_score_nu_SVR >= 0:
self.r_score_nu_SVR = sqrt(self.r2_score_nu_SVR)
self.cross_val_scores_nu_SVR = \
cross_val_score(self.regressor_nu_SVR, self.attributes,
self.labels, cv=self.cv)
# Output results
self._output_regressor_results(model="NuSVR")
def predict_nu_SVR(self, dataset_X=None):
"""Predicts the output of each datapoint in dataset_X using the
NuSVR model. Returns the predictions.
"""
# Check that run_nu_SVR() has already been called
if self.regressor_nu_SVR is None:
print("The NuSVR model seems to be missing.",
"Have you called run_nu_SVR() yet?")
return None
# Try to make the prediction; handle exception if dataset_X isn't a valid input
try:
y_prediction = self.regressor_nu_SVR.predict(dataset_X)
except Exception as e:
print("The NuSVR model failed to run.",
"Check your inputs and try again.")
print("Here is the exception message:")
print(e)
return None
print("\nNuSVR Predictions:\n", y_prediction, "\n")
return y_prediction
def run_linear_SVR(self):
"""Runs LinearSVR model."""
if self._check_inputs():
# Initialize regression model
self.regressor_linear_SVR = self._create_linear_SVR_model()
# Split data, if needed
# If testing/training sets are still None, call _split_data()
if self.dataset_X_test is None:
self._split_data()
# Train regression model
# Handle exception if arguments are incorrect and/or labels isn't
# quantitative data
try:
self.regressor_linear_SVR.fit(self.dataset_X_train,
self.dataset_y_train)
except Exception as e:
print("An exception occurred while training the LinearSVR",
"model. Check you arguments and try again.")
print("Does labels only contain quantitative data?")
print("Here is the exception message:")
print(e)
self.regressor_linear_SVR = None
return
# Metrics
y_prediction = self.regressor_linear_SVR.predict(
self.dataset_X_test)
self.mean_squared_error_linear_SVR = \
mean_squared_error(self.dataset_y_test, y_prediction)
self.r2_score_linear_SVR = \
self.regressor_linear_SVR.score(self.dataset_X_test,
self.dataset_y_test)
if self.r2_score_linear_SVR >= 0:
self.r_score_linear_SVR = sqrt(self.r2_score_linear_SVR)
self.cross_val_scores_linear_SVR = \
cross_val_score(self.regressor_linear_SVR, self.attributes,
self.labels, cv=self.cv)
# Output results
self._output_regressor_results(model="LinearSVR")
def predict_linear_SVR(self, dataset_X=None):
"""Predicts the output of each datapoint in dataset_X using the
LinearSVR model. Returns the predictions.
"""
# Check that run_linear_SVR() has already been called
if self.regressor_linear_SVR is None:
print("The LinearSVR model seems to be missing.",
"Have you called run_linear_SVR() yet?")
return None
# Try to make the prediction
# Handle exception if dataset_X isn't a valid input
try:
y_prediction = self.regressor_linear_SVR.predict(dataset_X)
except Exception as e:
print("The LinearSVR model failed to run.",
"Check your inputs and try again.")
print("Here is the exception message:")
print(e)
return None
print("\nLinearSVR Predictions:\n", y_prediction, "\n")
return y_prediction
# Helper methods
def _create_SVC_model(self, is_nu):
"""Runs UI for getting parameters and creating SVC or NuSVC
model.
"""
if is_nu:
print("\n==========================")
print("= NuSVC Parameter Inputs =")
print("==========================\n")
else:
print("\n========================")
print("= SVC Parameter Inputs =")
print("========================\n")
print("Default values:", "test_size = 0.25", "cv = 5",
"graph_results = False", sep="\n")
if is_nu:
print("nu = 0.5")
else:
print("C = 1.0")
print("kernel = 'rbf'",
"degree = 3",
"gamma = 'scale'",
"coef0 = 0.0",
"shrinking = True",
"tol = 0.001",
"cache_size = 200",
"class_weight = None",
"max_iter = -1",
"decision_function_shape = 'ovr'",
"break_ties = False",
"random_state = None",
"verbose = False", sep="\n")
# Set defaults
self.test_size = 0.25
self.cv = None
self.graph_results = False
while True:
user_input = input("\nUse default parameters (Y/n)? ").lower()
if user_input in {"y", ""}:
print("\n===========================================")
print("= End of inputs; press enter to continue. =")
input("===========================================\n")
if is_nu:
return NuSVC(probability=True)
return SVC(probability=True)
elif user_input == "n":
break
else:
print("Invalid input.")
print("\nIf you are unsure about a parameter, press enter to use its",
"default value.")
print("If you finish entering parameters early, enter 'q' to skip",
"ahead.\n")
# Set more defaults
if is_nu:
nu = 0.5
else:
C = 1.0
kernel = "rbf"
degree = 3
gamma = "scale"
coef0 = 0.0
shrinking = True
tol = 0.001
cache_size = 200
class_weight = None
max_iter = -1
decision_function_shape = "ovr"
break_ties = False
random_state = None
verbose = False
# Get user parameter input
while True:
break_early = False
while True:
user_input = input("\nWhat fraction of the dataset should be the "
+ "testing set (0,1)? ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = float(user_input)
if user_input <= 0 or user_input >= 1:
raise Exception
self.test_size = user_input
break
except Exception:
print("Invalid input.")
print("test_size =", self.test_size)
if break_early:
break
while True:
user_input = input("\nUse GridSearch to find the best "
+ "hyperparameters (y/N)? ").lower()
if user_input == "q":
break_early = True
break
elif user_input in {"n", "y", ""}:
break
else:
print("Invalid input.")
if break_early:
break
while user_input == "y":
print("\n= GridSearch Parameter Inputs =\n")
print("Enter 'q' to skip GridSearch.")
self.gridsearch = True
params = {}
while True:
print("\nEnter the kernels to try out.")
print("Options: 1-'linear', 2-'poly', 3-'rbf', 4-'sigmoid'.",
"Enter 'all' for all options.")
print("Example input: 1,2,3")
user_input = input().lower()
if user_input == "q":
self.gridsearch = False
break_early = True
break
elif user_input == "all":
kern_params = ["linear", "poly", "rbf", "sigmoid"]
break
else:
kern_dict = {1: "linear", 2: "poly", 3: "rbf",
4: "sigmoid"}
try:
kern_params_int = \
list(map(int, list(user_input.split(","))))
if len(kern_params_int) > len(kern_dict):
raise Exception
kern_params = []
for each in kern_params_int:
if not kern_dict.get(each):
raise Exception
kern_params.append(kern_dict.get(each))
break
except Exception:
print("Invalid input.")
if break_early:
break
params["kernel"] = kern_params
print("kernels:", kern_params)
while True:
print("\nEnter the list of kernel coefficients/gamma values",
"to try out.")
print("Example input: 0.001,0.0001")
user_input = input().lower()
if user_input == "q":
self.gridsearch = False
break_early = True
break
try:
gamma_params = \
list(map(float, list(user_input.split(","))))
if len(gamma_params) == 0:
raise Exception
for num in gamma_params:
if num <= 0:
raise Exception
break
except Exception:
print("Invalid input.")
if break_early:
break
params["gamma"] = gamma_params
print("gammas:", gamma_params)
while not is_nu:
print("\nEnter the list of regularization parameters to",
"try out.")
print("Example input: 1,10,100")
user_input = input().lower()
if user_input == "q":
self.gridsearch = False
break_early = True
break
try:
gamma_params = \
list(map(int, list(user_input.split(","))))
if len(gamma_params) == 0:
raise Exception
for num in gamma_params:
if num <= 0:
raise Exception
params["C"] = gamma_params
print("C values:", gamma_params)
break
except Exception:
print("Invalid input.")
if break_early:
break
print("\n= End of GridSearch inputs. =")
self.gs_params = params
best_params = self._run_gridsearch_classifier(is_nu)
kernel = best_params["kernel"]
gamma = best_params["gamma"]
if not is_nu:
C = best_params["C"]
break
break_early = False
while True:
user_input = input("\nEnter the number of folds for cross "
+ "validation [2,): ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = int(user_input)
if user_input < 2:
raise Exception
self.cv = user_input
break
except Exception:
print("Invalid input.")
print("cv =", self.cv)
if break_early:
break
while True:
user_input = \
input("\nGraph the ROC curve? Only binary classification "
+ "is supported (y/N): ").lower()
if user_input == "y":
self.graph_results = True
break
elif user_input in {"n", ""}:
break
elif user_input == "q":
break_early = True
break
else:
print("Invalid input.")
print("graph_results =", self.graph_results)
if break_early:
break
while is_nu:
user_input = input("\nEnter a decimal for nu (0,1]: ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = float(user_input)
if user_input <= 0 or user_input > 1:
raise Exception
nu = user_input
break
except Exception:
print("Invalid input.")
if is_nu:
print("nu =", nu)
while not is_nu and not self.gridsearch:
user_input = \
input("\nEnter a positive regularization parameter C: ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = float(user_input)
if user_input <= 0:
raise Exception
C = user_input
break
except Exception:
print("Invalid input.")
if not is_nu and not self.gridsearch:
print("C =", C)
if break_early:
break
while not self.gridsearch:
print("\nWhich kernel type should be used?")
user_input = \
input("Enter 1 for 'linear', 2 for 'poly', 3 for 'rbf', 4 "
+ "for 'sigmoid', or 5 for 'precomputed': ")
if user_input == "1":
kernel = "linear"
break
elif user_input == "2":
kernel = "poly"
break
elif user_input == "4":
kernel = "sigmoid"
break
elif user_input == "5":
kernel = "recomputed"
break
elif user_input in {"3", ""}:
break
elif user_input.lower() == "q":
break_early = True
break
else:
print("Invalid input.")
if not self.gridsearch:
print("kernel =", kernel)
if break_early:
break
while kernel == "poly":
user_input = \
input("\nEnter the degree of the kernel function [0,): ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = int(user_input)
if user_input < 0:
raise Exception
degree = user_input
break
except Exception:
print("Invalid input.")
if kernel == "poly":
print("degree =", degree)
if break_early:
break
while kernel in {"rbf", "poly", "sigmoid"} and not self.gridsearch:
print("\nSet the kernel coefficient.")
user_input = input("Enter 1 for 'scale', or 2 for 'auto': ")
if user_input == "2":
gamma = "auto"
break
elif user_input in {"1", ""}:
break
elif user_input.lower() == "q":
break_early = True
break
else:
print("Invalid input.")
if kernel in {"rbf", "poly", "sigmoid"} and not self.gridsearch:
print("gamma =", gamma)
if break_early:
break
while kernel in {"poly", "sigmoid"}:
user_input = input("\nEnter coef0, the independent term in the "
+ "kernel function [0,): ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = float(user_input)
if user_input < 0:
raise Exception
coef0 = user_input
break
except Exception:
print("Invalid input.")
if kernel in {"poly", "sigmoid"}:
print("coef0 =", coef0)
if break_early:
break
while True:
user_input = \
input("\nUse the shrinking heuristic (Y/n)? ").lower()
if user_input == "n":
shrinking = False
break
elif user_input in {"y", ""}:
break
elif user_input == "q":
break_early = True
break
else:
print("Invalid input.")
print("shrinking =", shrinking)
if break_early:
break
while True:
user_input = input("\nEnter a positive number for the "
+ "tolerance for stopping criterion: ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = float(user_input)
if user_input <= 0:
raise Exception
tol = user_input
break
except Exception:
print("Invalid input.")
print("tol =", tol)
if break_early:
break
while True:
user_input = \
input("\nEnter a positive kernel cache size in MB: ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = float(user_input)
if user_input <= 0:
raise Exception
cache_size = user_input
break
except Exception:
print("Invalid input.")
print("cache_size =", cache_size)
if break_early:
break
while True:
user_input = \
input("\nAutomatically adjust class weights (y/N)? ").lower()
if user_input == "y":
class_weight = "balanced"
break
elif user_input in {"n", ""}:
break
elif user_input == "q":
break_early = True
break
else:
print("Invalid input.")
print("class_weight =", class_weight)
if break_early:
break
while True:
user_input = \
input("\nEnter a positive maximum number of iterations, or "
+ "press enter for no limit: ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = int(user_input)
if user_input <= 0:
raise Exception
max_iter = user_input
break
except Exception:
print("Invalid input.")
print("max_iter =", max_iter)
if break_early:
break
while True:
print("\nSet the decision function.")
user_input = \
input("Enter 1 for one-vs-rest, or 2 for one-vs-one: ")
if user_input == "2":
decision_function_shape = "ovo"
break
elif user_input in {"1", ""}:
break
elif user_input.lower() == "q":
break_early = True
break
else:
print("Invalid input.")
print("decision_function_shape =", decision_function_shape)
if break_early:
break
while True:
user_input = input("\nEnable tie-breaking (y/N)? ").lower()
if user_input == "y":
break_ties = True
break
elif user_input in {"n", ""}:
break
elif user_input == "q":
break_early = True
break
else:
print("Invalid input.")
print("break_ties =", break_ties)
if break_early:
break
while True:
user_input = \
input("\nEnter a seed for the random number generator: ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
random_state = int(user_input)
break
except Exception:
print("Invalid input.")
print("random_state =", random_state)
if break_early:
break
while True:
user_input = input("\nEnable verbose logging (y/N)? ").lower()
if user_input == "y":
verbose = True
break
elif user_input in {"n", "q", ""}:
break
else:
print("Invalid input.")
print("verbose =", verbose)
break
print("\n===========================================")
print("= End of inputs; press enter to continue. =")
input("===========================================\n")
if is_nu:
return NuSVC(nu=nu, kernel=kernel, degree=degree, gamma=gamma,
coef0=coef0, shrinking=shrinking,
probability=True, tol=tol, cache_size=cache_size,
class_weight=class_weight, verbose=verbose,
max_iter=max_iter,
decision_function_shape=decision_function_shape,
break_ties=break_ties, random_state=random_state)
return SVC(C=C, kernel=kernel, degree=degree, gamma=gamma, coef0=coef0,
shrinking=shrinking, probability=True, tol=tol,
cache_size=cache_size, class_weight=class_weight,
verbose=verbose, max_iter=max_iter,
decision_function_shape=decision_function_shape,
break_ties=break_ties, random_state=random_state)
def _run_gridsearch_classifier(self, is_nu):
"""Runs GridSearch with the parameters given in run_SVC() or
run_nu_SVC(). Returns the best parameters."""
if is_nu:
clf = NuSVC()
else:
clf = SVC()
acc_scorer = make_scorer(accuracy_score)
if self.dataset_X_test is None:
self._split_data()
# Run GridSearch
grid_obj = GridSearchCV(clf, self.gs_params, scoring=acc_scorer)
grid_obj = grid_obj.fit(self.dataset_X_train, self.dataset_y_train)
# Set the clf to the best combination of parameters
clf = grid_obj.best_estimator_
# Fit the best algorithm to the data
clf.fit(self.dataset_X_train, self.dataset_y_train)
predictions = clf.predict(self.dataset_X_test)
self.gs_result = accuracy_score(self.dataset_y_test, predictions)
# Return the best parameters
print("\nBest GridSearch Parameters:\n", grid_obj.best_params_, "\n")
return grid_obj.best_params_
def _create_linear_SVC_model(self):
"""Runs UI for getting parameters and creating LinearSVC model."""
print("\n==============================")
print("= LinearSVC Parameter Inputs =")
print("==============================\n")
print("Default values:",
"test_size = 0.25",
"cv = 5",
"penalty = 'l2'",
"loss = 'squared_hinge'",
"dual = True",
"tol = 0.0001",
"C = 1.0",
"multi_class = 'ovr'",
"fit_intercept = True",
"intercept_scaling = 1",
"class_weight = None",
"random_state = None",
"max_iter = 1000",
"verbose = False", sep="\n")
# Set defaults
self.test_size = 0.25
self.cv = None
while True:
user_input = input("\nUse default parameters (Y/n)? ").lower()
if user_input in {"y", ""}:
print("\n===========================================")
print("= End of inputs; press enter to continue. =")
input("===========================================\n")
return LinearSVC()
elif user_input == "n":
break
else:
print("Invalid input.")
print("\nIf you are unsure about a parameter, press enter to use its",
"default value.")
print("If you finish entering parameters early, enter 'q' to skip",
"ahead.\n")
# Set more defaults
penalty = "l2"
loss = "squared_hinge"
dual = True
tol = 0.0001
C = 1.0
multi_class = "ovr"
fit_intercept = True
intercept_scaling = 1
class_weight = None
random_state = None
max_iter = 1000
verbose = 0
# Get user parameter input
while True:
break_early = False
while True:
user_input = input("\nWhat fraction of the dataset should be the "
+ "testing set (0,1)? ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = float(user_input)
if user_input <= 0 or user_input >= 1:
raise Exception
self.test_size = user_input
break
except Exception:
print("Invalid input.")
print("test_size =", self.test_size)
if break_early:
break
while True:
user_input = input("\nEnter the number of folds for cross "
+ "validation [2,): ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = int(user_input)
if user_input < 2:
raise Exception
self.cv = user_input
break
except Exception:
print("Invalid input.")
print("cv =", self.cv)
if break_early:
break
while True:
user_input = input("\nCalculate a y-intercept (Y/n)? ").lower()
if user_input == "n":
fit_intercept = False
break
elif user_input in {"y", ""}:
break
elif user_input == "q":
break_early = True
break
else:
print("Invalid input.")
print("fit_intercept =", fit_intercept)
if break_early:
break
while fit_intercept:
user_input = \
input("\nEnter a number for the intercept scaling factor: ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
intercept_scaling = float(user_input)
except Exception:
print("Invalid input.")
if fit_intercept:
print("intercept_scaling =", intercept_scaling)
if break_early:
break
while True:
print("\nSet the norm used in penalization.")
user_input = input("Enter 1 for 'l1', or 2 for 'l2': ")
if user_input == "1":
penalty = "l1"
break
elif user_input in {"2", ""}:
break
elif user_input.lower() == "q":
break_early = True
break
else:
print("Invalid input.")
print("penalty =", penalty)
if break_early:
break
while True:
print("\nChoose a loss function.")
user_input = \
input("Enter 1 for 'hinge', or 2 for 'squared_hinge': ")
if user_input == "1":
loss = "hinge"
break
elif user_input in {"2", ""}:
break
elif user_input.lower() == "q":
break_early = True
break
else:
print("Invalid input.")
print("loss =", loss)
if break_early:
break
while True:
print("\nShould the algorithm solve the duel or primal",
"optimization problem?")
user_input = input("Enter 1 for dual, or 2 for primal: ")
if user_input == "2":
dual = False
break
elif user_input in {"1", ""}:
break
elif user_input.lower() == "q":
break_early = True
break
else:
print("Invalid input.")
print("dual =", dual)
if break_early:
break
while True:
user_input = input("\nEnter a positive tolerance for stopping "
+ "criterion: ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = float(user_input)
if user_input <= 0:
raise Exception
tol = user_input
break
except Exception:
print("Invalid input.")
print("tol =", tol)
if break_early:
break
while True:
user_input = \
input("\nEnter a positive regularization parameter C: ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = float(user_input)
if user_input <= 0:
raise Exception
C = user_input
break
except Exception:
print("Invalid input.")
print("C =", C)
if break_early:
break
while True:
print("\nSet the multi-class strategy if there are more than",
"two classes.")
user_input = input("Enter 1 for 'one-vs-rest', or 2 for "
+ "'Crammer-Singer': ")
if user_input == "2":
multi_class = "crammer_singer"
break
elif user_input in {"1", ""}:
break
elif user_input.lower() == "q":
break_early = True
break
else:
print("Invalid input.")
print("multi_class =", multi_class)
if break_early:
break
while True:
user_input = input("\nAutomatically adjust class weights "
+ "(y/N)? ").lower()
if user_input == "y":
class_weight = "balanced"
break
elif user_input in {"n", ""}:
break
elif user_input == "q":
break_early = True
break
else:
print("Invalid input.")
print("class_weight =", class_weight)
if break_early:
break
while True:
user_input = \
input("\nEnter a positive maximum number of iterations: ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = int(user_input)
if user_input <= 0:
raise Exception
max_iter = user_input
break
except Exception:
print("Invalid input.")
print("max_iter =", max_iter)
if break_early:
break
while True:
user_input = \
input("\nEnter a seed for the random number generator: ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
random_state = int(user_input)
break
except Exception:
print("Invalid input.")
print("random_state =", random_state)
if break_early:
break
while True:
user_input = input("\nEnable verbose logging (y/N)? ").lower()
if user_input == "y":
verbose = True
break
elif user_input in {"n", "q", ""}:
break
else:
print("Invalid input.")
print("verbose =", verbose)
break
print("\n===========================================")
print("= End of inputs; press enter to continue. =")
input("===========================================\n")
return LinearSVC(penalty=penalty, loss=loss, dual=dual, tol=tol, C=C,
multi_class=multi_class, fit_intercept=fit_intercept,
intercept_scaling=intercept_scaling,
class_weight=class_weight, verbose=verbose,
random_state=random_state, max_iter=max_iter)
def _create_SVR_model(self, is_nu):
"""Runs UI for getting parameters and creates SVR or NuSVR model."""
if is_nu:
print("\n==========================")
print("= NuSVR Parameter Inputs =")
print("==========================\n")
else:
print("\n========================")
print("= SVR Parameter Inputs =")
print("========================\n")
print("Default values:", "test_size = 0.25", "cv = 5", sep="\n")
if is_nu:
print("nu = 0.5")
else:
print("epsilon = 0.1")
print("kernel = 'rbf'",
"degree = 3",
"gamma = 'scale'",
"coef0 = 0.0",
"tol = 0.001",
"C = 1.0",
"shrinking = True",
"cache_size = 200",
"verbose = False",
"max_iter = -1", sep="\n")
# Set defaults
self.test_size = 0.25
self.cv = None
while True:
user_input = input("\nUse default parameters (Y/n)? ").lower()
if user_input in {"y", ""}:
print("\n===========================================")
print("= End of inputs; press enter to continue. =")
input("===========================================\n")
if is_nu:
return NuSVR()
return SVR()
elif user_input == "n":
break
else:
print("Invalid input.")
print("\nIf you are unsure about a parameter, press enter to use its",
"default value.")
print("If you finish entering parameters early, enter 'q' to skip",
"ahead.\n")
# Set more defaults
if is_nu:
nu = 0.5
else:
epsilon = 0.1
kernel = "rbf"
degree = 3
gamma = "scale"
coef0 = 0.0
tol = 0.001
C = 1.0
shrinking = True
cache_size = 200
verbose = False
max_iter = -1
# Get user parameter input
while True:
break_early = False
while True:
user_input = input("\nWhat fraction of the dataset should be the "
+ "testing set (0,1)? ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = float(user_input)
if user_input <= 0 or user_input >= 1:
raise Exception
self.test_size = user_input
break
except Exception:
print("Invalid input.")
print("test_size =", self.test_size)
if break_early:
break
while True:
user_input = input("\nUse GridSearch to find the best "
+ "hyperparameters (y/N)? ").lower()
if user_input == "q":
break_early = True
break
elif user_input in {"n", "y", ""}:
break
else:
print("Invalid input.")
if break_early:
break
while user_input == "y":
print("\n= GridSearch Parameter Inputs =\n")
print("Enter 'q' to skip GridSearch.")
self.gridsearch = True
params = {}
while True:
print("\nEnter the kernels to try out.")
print("Options: 1-'linear', 2-'poly', 3-'rbf', 4-'sigmoid'.",
"Enter 'all' for all options.")
print("Example input: 1,2,3")
user_input = input().lower()
if user_input == "q":
self.gridsearch = False
break_early = True
break
elif user_input == "all":
kern_params = ["linear", "poly", "rbf", "sigmoid"]
break
else:
kern_dict = {1: "linear", 2: "poly", 3: "rbf",
4: "sigmoid"}
try:
kern_params_int = \
list(map(int, list(user_input.split(","))))
if len(kern_params_int) > len(kern_dict):
raise Exception
kern_params = []
for each in kern_params_int:
if not kern_dict.get(each):
raise Exception
kern_params.append(kern_dict.get(each))
break
except Exception:
print("Invalid input.")
if break_early:
break
params["kernel"] = kern_params
print("kernels:", kern_params)
while is_nu:
print("\nEnter a list of decimals for nu.")
print("Example input: 0.1,0.2,0.3")
user_input = input().lower()
if user_input == "q":
self.gridsearch = False
break_early = True
break
try:
nu_params = \
list(map(float, list(user_input.split(","))))
if len(nu_params) == 0:
raise Exception
for num in nu_params:
if num <= 0:
raise Exception
params["nu"] = nu_params
print("nu values:", nu_params)
break
except Exception:
print("Invalid input.")
while not is_nu:
print("\nEnter epsilons (the ranges from an actual value",
"where penalties aren't applied) to try out.")
print("Example input: 0.1,0.2,0.3")
user_input = input().lower()
if user_input == "q":
self.gridsearch = False
break_early = True
break
try:
eps_params = \
list(map(float, list(user_input.split(","))))
if len(eps_params) == 0:
raise Exception
for num in eps_params:
if num <= 0:
raise Exception
params["epsilon"] = eps_params
print("epsilon values:", eps_params)
break
except Exception:
print("Invalid input.")
if break_early:
break
while not is_nu:
print("\nEnter the list of regularization parameters to",
"try out.")
print("Example input: 1,10,100")
user_input = input().lower()
if user_input == "q":
self.gridsearch = False
break_early = True
break
try:
gamma_params = \
list(map(int, list(user_input.split(","))))
if len(gamma_params) == 0:
raise Exception
for num in gamma_params:
if num <= 0:
raise Exception
params["C"] = gamma_params
print("C values:", gamma_params)
break
except Exception:
print("Invalid input.")
if break_early:
break
print("\n= End of GridSearch inputs. =")
self.gs_params = params
best_params = self._run_gridsearch_regressor(is_nu)
kernel = best_params["kernel"]
if is_nu:
nu = best_params["nu"]
else:
C = best_params["C"]
epsilon = best_params["epsilon"]
break
break_early = False
while True:
user_input = input("\nEnter the number of folds for cross "
+ "validation [2,): ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = int(user_input)
if user_input < 2:
raise Exception
self.cv = user_input
break
except Exception:
print("Invalid input.")
print("cv =", self.cv)
if break_early:
break
while is_nu and not self.gridsearch:
user_input = input("\nEnter a decimal for nu (0,1]: ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = float(user_input)
if user_input <= 0 or user_input > 1:
raise Exception
nu = user_input
break
except Exception:
print("Invalid input.")
if is_nu and not self.gridsearch:
print("nu =", nu)
while not is_nu and not self.gridsearch:
user_input = \
input("\nEnter a positive epsilon, the range from an actual "
+ "value where penalties aren't applied: ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = float(user_input)
if user_input <= 0:
raise Exception
epsilon = user_input
break
except Exception:
print("Invalid input.")
if not is_nu and not self.gridsearch:
print("epsilon =", epsilon)
if break_early:
break
while not self.gridsearch:
print("\nWhich kernel type should be used?")
user_input = \
input("Enter 1 for 'linear', 2 for 'poly', 3 for 'rbf', 4 "
+ "for 'sigmoid', or 5 for 'precomputed': ")
if user_input == "1":
kernel = "linear"
break
elif user_input == "2":
kernel = "poly"
break
elif user_input == "4":
kernel = "sigmoid"
break
elif user_input == "5":
kernel = "recomputed"
break
elif user_input in {"3", ""}:
break
elif user_input.lower() == "q":
break_early = True
break
else:
print("Invalid input.")
if not self.gridsearch:
print("kernel =", kernel)
if break_early:
break
while kernel == "poly":
user_input = \
input("\nEnter the degree of the kernel function [0,): ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = int(user_input)
if user_input < 0:
raise Exception
degree = user_input
break
except Exception:
print("Invalid input.")
if kernel == "poly":
print("degree =", degree)
if break_early:
break
while kernel in {"rbf", "poly", "sigmoid"}:
print("\nSet the kernel coefficient.")
user_input = input("Enter 1 for 'scale', or 2 for 'auto': ")
if user_input == "2":
gamma = "auto"
break
elif user_input in {"1", ""}:
break
elif user_input.lower() == "q":
break_early = True
break
else:
print("Invalid input.")
if kernel in {"rbf", "poly", "sigmoid"}:
print("gamma =", gamma)
if break_early:
break
while not self.gridsearch or is_nu:
user_input = \
input("\nEnter a positive regularization parameter C: ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = float(user_input)
if user_input <= 0:
raise Exception
C = user_input
break
except Exception:
print("Invalid input.")
if not self.gridsearch or is_nu:
print("C =", C)
if break_early:
break
while kernel in {"poly", "sigmoid"}:
user_input = input("\nEnter coef0, the independent term in the "
+ "kernel function [0,): ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = float(user_input)
if user_input < 0:
raise Exception
coef0 = user_input
break
except Exception:
print("Invalid input.")
if kernel in {"poly", "sigmoid"}:
print("coef0 =", coef0)
if break_early:
break
while True:
user_input = input("\nEnter a positive tolerance for stopping "
+ "criterion: ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = float(user_input)
if user_input <= 0:
raise Exception
tol = user_input
break
except Exception:
print("Invalid input.")
print("tol =", tol)
if break_early:
break
while True:
user_input = \
input("\nUse the shrinking heuristic (Y/n)? ").lower()
if user_input == "n":
shrinking = False
break
elif user_input in {"y", ""}:
break
elif user_input == "q":
break_early = True
break
else:
print("Invalid input.")
print("shrinking =", shrinking)
if break_early:
break
while True:
user_input = \
input("\nEnter a positive kernel cache size in MB: ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = float(user_input)
if user_input <= 0:
raise Exception
cache_size = user_input
break
except Exception:
print("Invalid input.")
print("cache_size =", cache_size)
if break_early:
break
while True:
user_input = \
input("\nEnter a positive maximum number of iterations, or "
+ "press enter for no limit: ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = int(user_input)
if user_input <= 0:
raise Exception
max_iter = user_input
break
except Exception:
print("Invalid input.")
print("max_iter =", max_iter)
if break_early:
break
while True:
user_input = input("\nEnable verbose logging (y/N)? ").lower()
if user_input == "y":
verbose = True
break
elif user_input in {"n", "q", ""}:
break
else:
print("Invalid input.")
print("verbose =", verbose)
break
print("\n===========================================")
print("= End of inputs; press enter to continue. =")
input("===========================================\n")
if is_nu:
return NuSVR(nu=nu, C=C, kernel=kernel, degree=degree, gamma=gamma,
coef0=coef0, shrinking=shrinking, tol=tol,
cache_size=cache_size, verbose=verbose,
max_iter=max_iter)
return SVR(kernel=kernel, degree=degree, gamma=gamma, coef0=coef0,
tol=tol, C=C, epsilon=epsilon, shrinking=shrinking,
cache_size=cache_size, verbose=verbose, max_iter=max_iter)
def _run_gridsearch_regressor(self, is_nu):
"""Runs GridSearch with the parameters given in run_SVR() or
run_nu_SVR(). Returns the best parameters."""
if is_nu:
clf = NuSVR()
else:
clf = SVR()
if self.dataset_X_test is None:
self._split_data()
# Run GridSearch
grid_obj = GridSearchCV(clf, self.gs_params, scoring="r2")
grid_obj = grid_obj.fit(self.dataset_X_train, self.dataset_y_train)
# Set the clf to the best combination of parameters
clf = grid_obj.best_estimator_
# Fit the best algorithm to the data
clf.fit(self.dataset_X_train, self.dataset_y_train)
self.gs_result = clf.score(self.dataset_X_test, self.dataset_y_test)
# Return the best parameters
print("\nBest GridSearch Parameters:\n", grid_obj.best_params_, "\n")
return grid_obj.best_params_
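# A minimal sketch of what self.gs_params can look like by this point
# (illustrative values, assuming the plain SVR path rather than NuSVR):
#     {"kernel": ["rbf", "linear"], "epsilon": [0.1, 0.2], "C": [1, 10, 100]}
# GridSearchCV fits every combination of these values on the training split,
# scores each candidate with r2, and exposes the winner via best_params_.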
def _create_linear_SVR_model(self):
"""Runs UI for getting parameters and creates LinearSVR model."""
print("\n==============================")
print("= LinearSVR Parameter Inputs =")
print("==============================\n")
print("Default values:",
"test_size = 0.25",
"cv = 5",
"epsilon = 0.0",
"tol = 0.0001",
"C = 1.0",
"loss = 'epsilon_insensitive'",
"fit_intercept = True",
"intercept_scaling = 1.0",
"dual = True",
"random_state = None",
"max_iter = 1000",
"verbose = False", sep="\n")
# Set defaults
self.test_size = 0.25
self.cv = None
while True:
user_input = input("\nUse default parameters (Y/n)? ").lower()
if user_input in {"y", ""}:
print("\n===========================================")
print("= End of inputs; press enter to continue. =")
input("===========================================\n")
return LinearSVR()
elif user_input == "n":
break
else:
print("Invalid input.")
print("\nIf you are unsure about a parameter, press enter to use its",
"default value.")
print("If you finish entering parameters early, enter 'q' to skip",
"ahead.\n")
# Set more defaults
epsilon = 0.0
tol = 0.0001
C = 1.0
loss = "epsilon_insensitive"
fit_intercept = True
intercept_scaling = 1.0
dual = True
random_state = None
max_iter = 1000
verbose = 0
# Get user parameter input
while True:
break_early = False
while True:
user_input = input("\nWhat fraction of the dataset should be the "
+ "testing set (0,1)? ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = float(user_input)
if user_input <= 0 or user_input >= 1:
raise Exception
self.test_size = user_input
break
except Exception:
print("Invalid input.")
print("test_size =", self.test_size)
if break_early:
break
while True:
user_input = input("\nEnter the number of folds for cross "
+ "validation [2,): ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = int(user_input)
if user_input < 2:
raise Exception
self.cv = user_input
break
except Exception:
print("Invalid input.")
print("cv =", self.cv)
if break_early:
break
while True:
user_input = \
input("\nEnter a positive epsilon, the range from an actual "
+ "value where penalties aren't applied: ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = float(user_input)
if user_input <= 0:
raise Exception
epsilon = user_input
break
except Exception:
print("Invalid input.")
print("epsilon =", epsilon)
if break_early:
break
while True:
user_input = input("\nEnter a positive tolerance for stopping "
+ "criterion: ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = float(user_input)
if user_input <= 0:
raise Exception
tol = user_input
break
except Exception:
print("Invalid input.")
print("tol =", tol)
if break_early:
break
while True:
user_input = \
input("\nEnter a positive regularization parameter C: ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = float(user_input)
if user_input <= 0:
raise Exception
C = user_input
break
except Exception:
print("Invalid input.")
print("C =", C)
if break_early:
break
while True:
print("\nChoose a loss function.")
user_input = \
input("\nEnter 1 for 'epsilon_insensitive', or 2 for "
+ "'squared_epsilon_insensitive': ")
if user_input == "2":
loss = "squared_epsilon_insensitive"
break
elif user_input in {"1", ""}:
break
elif user_input.lower() == "q":
break_early = True
break
else:
print("Invalid input.")
print("loss =", loss)
if break_early:
break
while True:
user_input = input("\nCalculate a y-intercept (Y/n)? ").lower()
if user_input == "n":
fit_intercept = False
break
elif user_input in {"y", ""}:
break
elif user_input == "q":
break_early = True
break
else:
print("Invalid input.")
print("fit_intercept =", fit_intercept)
if break_early:
break
while fit_intercept:
user_input = \
input("\nEnter a number for the intercept scaling factor: ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
intercept_scaling = float(user_input)
except Exception:
print("Invalid input.")
if fit_intercept:
print("intercept_scaling =", intercept_scaling)
if break_early:
break
while True:
print("\nShould the algorithm solve the duel or primal",
"optimization problem?")
user_input = input("Enter 1 for dual, or 2 for primal: ")
if user_input == "2":
dual = False
break
elif user_input in {"1", ""}:
break
elif user_input.lower() == "q":
break_early = True
break
else:
print("Invalid input.")
print("dual =", dual)
if break_early:
break
while True:
user_input = \
input("\nEnter a positive maximum number of iterations: ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = int(user_input)
if user_input <= 0:
raise Exception
max_iter = user_input
break
except Exception:
print("Invalid input.")
print("max_iter =", max_iter)
if break_early:
break
while True:
user_input = \
input("\nEnter a seed for the random number generator: ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
random_state = int(user_input)
break
except Exception:
print("Invalid input.")
print("random_state =", random_state)
if break_early:
break
while True:
user_input = input("\nEnable verbose logging (y/N)? ").lower()
if user_input == "y":
verbose = True
break
elif user_input in {"n", "q", ""}:
break
else:
print("Invalid input.")
print("verbose =", verbose)
break
print("\n===========================================")
print("= End of inputs; press enter to continue. =")
input("===========================================\n")
return LinearSVR(epsilon=epsilon, tol=tol, C=C, loss=loss,
fit_intercept=fit_intercept,
intercept_scaling=intercept_scaling, dual=dual,
verbose=verbose, random_state=random_state,
max_iter=max_iter)
def _output_classifier_results(self, model):
"""Outputs model metrics after a classifier model finishes running."""
if model == "SVC":
print("\n===============")
print("= SVC Results =")
print("===============\n")
print("{:<20} {:<20}".format("Accuracy:", self.accuracy_SVC))
if self.bin:
print("\n{:<20} {:<20}".format("ROC AUC:", self.roc_auc_SVC))
else:
print("\nConfusion Matrix:\n", self.confusion_matrix_SVC)
print("\nCross Validation Scores:", self.cross_val_scores_SVC)
if self.gridsearch:
print("\n{:<20} {:<20}".format("GridSearch Score:",
self.gs_result))
if self.bin and self.graph_results:
plt.plot(self.fpr, self.tpr, label="data 1")
plt.xlabel("False Positive Rate")
plt.ylabel("True Positive Rate")
plt.title("ROC Curve")
plt.legend(loc=4)
plt.show()
print("\n\nCall predict_SVC() to make predictions for new data.")
elif model == "NuSVC":
print("\n=================")
print("= NuSVC Results =")
print("=================\n")
print("{:<20} {:<20}".format("Accuracy:", self.accuracy_nu_SVC))
if self.bin:
print("\n{:<20} {:<20}".format(
"ROC AUC:", self.roc_auc_nu_SVC))
else:
print("\nConfusion Matrix:\n", self.confusion_matrix_nu_SVC)
print("\nCross Validation Scores:", self.cross_val_scores_nu_SVC)
if self.gridsearch:
print("\n{:<20} {:<20}".format("GridSearch Score:",
self.gs_result))
if self.bin and self.graph_results:
plt.plot(self.fpr, self.tpr, label="data 1")
plt.xlabel("False Positive Rate")
plt.ylabel("True Positive Rate")
plt.title("ROC Curve")
plt.legend(loc=4)
plt.show()
print("\n\nCall predict_nu_SVC() to make predictions for new data.")
else:
print("\n=====================")
print("= LinearSVC Results =")
print("=====================\n")
print("{:<20} {:<20}".format(
"Accuracy:", self.accuracy_linear_SVC))
print("\nCross Validation Scores:",
self.cross_val_scores_linear_SVC)
print("\n\nCall predict_linear_SVC() to make predictions for",
"new data.")
print("\n===================")
print("= End of results. =")
print("===================\n")
def _output_regressor_results(self, model):
"""Outputs model metrics after a regressor model finishes running."""
if model == "SVR":
print("\n===============")
print("= SVR Results =")
print("===============\n")
print("{:<20} {:<20}".format("Mean Squared Error:",
self.mean_squared_error_SVR))
print("\n{:<20} {:<20}".format("R2 Score:", self.r2_score_SVR))
print("\n{:<20} {:<20}".format("R Score:", str(self.r_score_SVR)))
print("\nCross Validation Scores", self.cross_val_scores_SVR)
if self.gridsearch:
print("\n{:<20} {:<20}".format("GridSearch Score:",
self.gs_result))
print("\n\nCall predict_SVR() to make predictions for new data.")
elif model == "NuSVR":
print("\n=================")
print("= NuSVR Results =")
print("=================\n")
print("{:<20} {:<20}".format("Mean Squared Error:",
self.mean_squared_error_nu_SVR))
print("\n{:<20} {:<20}".format("R2 Score:", self.r2_score_nu_SVR))
print("\n{:<20} {:<20}".format(
"R Score:", str(self.r_score_nu_SVR)))
print("\nCross Validation Scores:", self.cross_val_scores_nu_SVR)
if self.gridsearch:
print("\n{:<20} {:<20}".format("GridSearch Score:",
self.gs_result))
print("\n\nCall predict_nu_SVR() to make predictions for new data.")
else:
print("\n=====================")
print("= LinearSVR Results =")
print("=====================\n")
print("{:<20} {:<20}".format("Mean Squared Error:",
self.mean_squared_error_linear_SVR))
print("\n{:<20} {:<20}".format("R2 Score:",
self.r2_score_linear_SVR))
print("\n{:<20} {:<20}".format("R Score:",
str(self.r_score_linear_SVR)))
print("\nCross Validation Scores:\n",
self.cross_val_scores_linear_SVR)
print("\n\nCall predict_linear_SVR() to make predictions for new data.")
print("\n===================")
print("= End of results. =")
print("===================\n")
def _split_data(self):
"""Helper method for splitting attributes and labels into
training and testing sets.
This method runs under the assumption that all relevant instance
data has been checked for correctness.
"""
self.dataset_X_train, self.dataset_X_test, self.dataset_y_train, \
self.dataset_y_test = train_test_split(self.attributes, self.labels,
test_size=self.test_size)
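# A minimal sketch of the resulting split (illustrative sizes, assuming
# 100 samples and the default test_size of 0.25):
#     attributes (100, n_features) -> X_train (75, n_features), X_test (25, n_features)
#     labels (100,) -> y_train (75,), y_test (25,)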
def _check_inputs(self):
"""Verifies if instance data is ready for use in SVM model."""
# Check if attributes exists
if self.attributes is None:
print("attributes is missing; call set_attributes(new_attributes)",
"to fix this! new_attributes should be a populated dataset",
"of independent variables.")
return False
# Check if labels exists
if self.labels is None:
print("labels is missing; call set_labels(new_labels) to fix this!",
"new_labels should be a populated dataset of classes.")
return False
# Check if attributes and labels have same number of rows (samples)
if self.attributes.shape[0] != self.labels.shape[0]:
print("attributes and labels don't have the same number of rows.",
"Make sure the number of samples in each dataset matches!")
return False
return True
| 35.06866
| 87
| 0.449701
| 9,088
| 95,001
| 4.50154
| 0.045885
| 0.082058
| 0.027157
| 0.038279
| 0.881716
| 0.853141
| 0.805622
| 0.780958
| 0.742215
| 0.723026
| 0
| 0.009028
| 0.455479
| 95,001
| 2,708
| 88
| 35.08161
| 0.781809
| 0.065936
| 0
| 0.789448
| 0
| 0
| 0.156667
| 0.016306
| 0
| 0
| 0
| 0
| 0
| 1
| 0.026621
| false
| 0
| 0.00242
| 0
| 0.064376
| 0.161181
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
23c88a88bff346586761330bf4fb83822ede6130
| 8,567
|
py
|
Python
|
tests/unit/cirrus/plugins/builders/pip_virtualenv_test.py
|
Maxsparrow/cirrus
|
ae9639daba4f2d8d9285e98d5b11a89eac573f96
|
[
"Apache-2.0"
] | 12
|
2016-04-30T16:13:55.000Z
|
2021-01-20T23:42:31.000Z
|
tests/unit/cirrus/plugins/builders/pip_virtualenv_test.py
|
Maxsparrow/cirrus
|
ae9639daba4f2d8d9285e98d5b11a89eac573f96
|
[
"Apache-2.0"
] | 153
|
2015-02-12T15:25:42.000Z
|
2020-03-09T07:16:15.000Z
|
tests/unit/cirrus/plugins/builders/pip_virtualenv_test.py
|
Maxsparrow/cirrus
|
ae9639daba4f2d8d9285e98d5b11a89eac573f96
|
[
"Apache-2.0"
] | 7
|
2015-06-15T21:30:38.000Z
|
2020-02-17T02:13:00.000Z
|
#!/usr/bin/env python
"""
tests for VirtualenvPip builder plugin
"""
import unittest
import mock
import tempfile
import os
from cirrus.build import FACTORY
PYPIRC = \
"""
[distutils]
index-servers =
pypi
devpi
[pypi]
repository: https://pypi.python.org/pypi/
username: steve
password: stevespass
[devpi]
repository: https://localhost:4000
username: the_steve
password: stevespass
"""
class VenvPipBuilderTest(unittest.TestCase):
@mock.patch('cirrus.builder_plugin.load_configuration')
@mock.patch('cirrus.builder_plugin.repo_directory')
@mock.patch('cirrus.plugins.builders.venv_pip.VirtualEnvironment')
@mock.patch('cirrus.plugins.builders.venv_pip.local')
@mock.patch('cirrus.builder_plugin.local')
@mock.patch('cirrus.plugins.builders.venv_pip.build_pip_command')
def test_builder(self, mock_pip, mock_base_local, mock_local, mock_venv_cls, mock_repo_dir, mock_load_conf):
mock_pip.return_value = "PIP_COMMAND"
mock_repo_dir.return_value = "REPO"
mock_venv = mock.Mock()
mock_venv.open_or_create = mock.Mock()
mock_venv_cls.return_value = mock_venv
mock_conf = mock.Mock(name="load_configuration")
mock_conf.get = mock.Mock(return_value={
'build': {'builder': 'conf'},
'extra_requirements': ['test-requirements.txt', 'more-reqs.txt']
})
mock_load_conf.return_value = mock_conf
plugin = FACTORY('VirtualenvPip')
plugin.create(clean=True)
plugin.activate()
mock_base_local.assert_has_calls([
mock.call('. REPO/venv/bin/activate && python setup.py develop')
])
mock_local.assert_has_calls([
mock.call('PIP_COMMAND'),
mock.call('PIP_COMMAND'),
mock.call('PIP_COMMAND')
])
mock_base_local.reset_mock()
plugin.create(clean=True, nosetupdevelop=True)
self.assertFalse(mock_base_local.called)
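# One reading of the assertions above (an interpretation, not stated in the
# source): build_pip_command is mocked to return "PIP_COMMAND", so the three
# expected local() calls plausibly correspond to the base requirements file
# plus the two 'extra_requirements' entries, and nosetupdevelop=True is
# expected to suppress the 'python setup.py develop' step.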
@mock.patch('cirrus.builder_plugin.load_configuration')
@mock.patch('cirrus.builder_plugin.repo_directory')
@mock.patch('cirrus.plugins.builders.venv_pip.VirtualEnvironment')
@mock.patch('cirrus.plugins.builders.venv_pip.local')
@mock.patch('cirrus.builder_plugin.local')
@mock.patch('cirrus.plugins.builders.venv_pip.build_pip_command')
def test_builder_extras_require(
self,
mock_pip,
mock_base_local,
mock_local,
mock_venv_cls,
mock_repo_dir,
mock_load_conf
):
mock_pip.return_value = "PIP_COMMAND"
mock_repo_dir.return_value = "REPO"
mock_venv = mock.Mock()
mock_venv.open_or_create = mock.Mock()
mock_venv_cls.return_value = mock_venv
mock_conf = mock.Mock(name="load_configuration")
mock_conf.get = mock.Mock(return_value={
'build': {'builder': 'conf'},
'extra_requirements': ['test-requirements.txt', 'more-reqs.txt']
})
mock_conf.extras_require = mock.Mock(
return_value={'ALL': None, 'SERVER': None}
)
mock_conf.package_name = mock.Mock(return_value='PACKAGE')
mock_load_conf.return_value = mock_conf
plugin = FACTORY('VirtualenvPip')
plugin.create(clean=True, extras_require=['ALL', 'SERVER'])
plugin.activate()
mock_base_local.assert_has_calls([
mock.call('. REPO/venv/bin/activate && python setup.py develop')
])
mock_local.assert_has_calls([
mock.call('PIP_COMMAND'),
mock.call('PIP_COMMAND'),
mock.call('PIP_COMMAND'),
mock.call('. REPO/venv/bin/activate && pip install PACKAGE[ALL]'),
mock.call('. REPO/venv/bin/activate && pip install PACKAGE[SERVER]')
])
mock_base_local.reset_mock()
plugin = FACTORY('VirtualenvPip')
plugin.create(clean=True, all_extras=True)
plugin.activate()
mock_base_local.assert_has_calls([
mock.call('. REPO/venv/bin/activate && python setup.py develop')
])
mock_local.assert_has_calls([
mock.call('PIP_COMMAND'),
mock.call('PIP_COMMAND'),
mock.call('PIP_COMMAND'),
mock.call('. REPO/venv/bin/activate && pip install PACKAGE[ALL]'),
mock.call('. REPO/venv/bin/activate && pip install PACKAGE[SERVER]')
])
mock_base_local.reset_mock()
plugin.create(clean=True, nosetupdevelop=True)
self.assertFalse(mock_base_local.called)
@mock.patch('cirrus.builder_plugin.load_configuration')
@mock.patch('cirrus.builder_plugin.repo_directory')
@mock.patch('cirrus.plugins.builders.venv_pip.VirtualEnvironment')
@mock.patch('cirrus.plugins.builders.venv_pip.local')
@mock.patch('cirrus.builder_plugin.local')
@mock.patch('cirrus.plugins.builders.venv_pip.build_pip_command')
def test_builder_python_bin(self, mock_pip, mock_base_local, mock_local, mock_venv_cls, mock_repo_dir, mock_load_conf):
mock_pip.return_value = "PIP_COMMAND"
mock_repo_dir.return_value = "REPO"
mock_venv = mock.Mock()
mock_venv.open_or_create = mock.Mock()
mock_venv_cls.return_value = mock_venv
mock_conf = mock.Mock(name="load_configuration")
mock_conf.get = mock.Mock(return_value={
'build': {'builder': 'conf'},
'extra_requirements': ['test-requirements.txt', 'more-reqs.txt'],
'python': 'python6.7'
})
mock_load_conf.return_value = mock_conf
plugin = FACTORY('VirtualenvPip')
plugin.create(clean=True)
mock_venv_cls.assert_has_calls([
mock.call('REPO/venv', python='python6.7', system_site_packages=False)
])
# verify conda style python version works too
mock_venv_cls.reset_mock()
mock_conf = mock.Mock(name="load_configuration")
mock_conf.get = mock.Mock(return_value={
'build': {'builder': 'conf'},
'extra_requirements': ['test-requirements.txt', 'more-reqs.txt'],
'python': '6.7'
})
mock_load_conf.return_value = mock_conf
plugin2 = FACTORY('VirtualenvPip')
plugin2.create(clean=True)
mock_venv_cls.assert_has_calls([
mock.call('REPO/venv', python='python6.7', system_site_packages=False)
])
@mock.patch('cirrus.builder_plugin.load_configuration')
@mock.patch('cirrus.builder_plugin.repo_directory')
@mock.patch('cirrus.plugins.builders.venv_pip.VirtualEnvironment')
@mock.patch('cirrus.plugins.builders.venv_pip.local')
@mock.patch('cirrus.builder_plugin.local')
@mock.patch('cirrus.plugins.builders.venv_pip.build_pip_command')
def test_builder_errors(self, mock_pip, mock_base_local, mock_local, mock_venv_cls, mock_repo_dir, mock_load_conf):
mock_pip.return_value = "PIP_COMMAND"
mock_repo_dir.return_value = "REPO"
mock_venv = mock.Mock()
mock_local.side_effect = OSError("BOOM")
mock_venv.open_or_create = mock.Mock()
mock_venv_cls.return_value = mock_venv
mock_conf = mock.Mock(name="load_configuration")
mock_conf.get = mock.Mock(return_value={
'build': {'builder': 'conf'},
'extra_requirements': ['test-requirements.txt', 'more-reqs.txt']
})
mock_load_conf.return_value = mock_conf
plugin = FACTORY('VirtualenvPip')
self.assertRaises(OSError, plugin.create, clean=True)
mock_local.side_effect = [None, OSError("BOOM")]
self.assertRaises(OSError, plugin.create, clean=True)
@mock.patch('cirrus.builder_plugin.load_configuration')
@mock.patch('cirrus.builder_plugin.repo_directory')
@mock.patch('cirrus.plugins.builders.venv_pip.local')
@mock.patch('cirrus.plugins.builders.venv_pip.os.path.exists')
def test_builder_clean(self, mock_exists, mock_local, mock_repo, mock_load_conf):
mock_exists.return_value = True
mock_repo.return_value = "REPO"
mock_conf = mock.Mock(name="load_configuration")
mock_conf.get = mock.Mock(return_value={
'build': {'builder': 'conf'},
'extra_requirements': ['test-requirements.txt', 'more-reqs.txt'],
'python': 'python6.7',
'virtualenv_name': 'venv'
})
mock_load_conf.return_value = mock_conf
plugin = FACTORY('VirtualenvPip')
plugin.clean()
self.assertTrue(mock_local.called)
if __name__ == '__main__':
unittest.main()
| 38.245536
| 123
| 0.657173
| 1,042
| 8,567
| 5.118042
| 0.111324
| 0.046503
| 0.078755
| 0.057754
| 0.843803
| 0.843803
| 0.843803
| 0.836115
| 0.808738
| 0.808738
| 0
| 0.002381
| 0.215478
| 8,567
| 223
| 124
| 38.41704
| 0.791103
| 0.012023
| 0
| 0.759777
| 0
| 0
| 0.291702
| 0.171182
| 0
| 0
| 0
| 0
| 0.061453
| 1
| 0.027933
| false
| 0
| 0.027933
| 0
| 0.061453
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9b264017ad1b2785a36d0f32a430b293866b18f7
| 2,117
|
py
|
Python
|
app/hid/keyboard_test.py
|
limshengli/tinypilot
|
aeba23e2e108008bea2b7577f16cfef949238648
|
[
"MIT"
] | 1,334
|
2020-07-14T01:53:02.000Z
|
2021-06-08T09:48:28.000Z
|
app/hid/keyboard_test.py
|
limshengli/tinypilot
|
aeba23e2e108008bea2b7577f16cfef949238648
|
[
"MIT"
] | 320
|
2020-07-07T20:18:05.000Z
|
2021-06-07T21:18:42.000Z
|
app/hid/keyboard_test.py
|
limshengli/tinypilot
|
aeba23e2e108008bea2b7577f16cfef949238648
|
[
"MIT"
] | 124
|
2020-07-23T16:39:06.000Z
|
2021-06-04T10:22:53.000Z
|
import tempfile
import unittest
from hid import keyboard
from hid import keycodes
class KeyboardTest(unittest.TestCase):
def test_send_hid_keycode_to_hid_interface(self):
with tempfile.NamedTemporaryFile() as input_file:
keyboard.send_keystroke(
keyboard_path=input_file.name,
control_keys=keycodes.KEYCODE_NONE,
hid_keycode=keycodes.KEYCODE_A,
)
input_file.seek(0)
# Press the key then release the key.
self.assertEqual(
b'\x00\x00\x04\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00', input_file.read())
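# For reference, the 8-byte USB HID boot-keyboard report is laid out as:
#     byte 0: modifier bitmask (0x02 = left shift), byte 1: reserved,
#     bytes 2-7: up to six concurrent keycodes (0x04 = 'a').
# So the expected bytes above are "press 'a' with no modifiers" followed by
# an all-zero report that releases every key.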
def test_send_control_key_to_hid_interface(self):
with tempfile.NamedTemporaryFile() as input_file:
keyboard.send_keystroke(
keyboard_path=input_file.name,
control_keys=keycodes.MODIFIER_LEFT_SHIFT,
hid_keycode=keycodes.KEYCODE_NONE,
)
input_file.seek(0)
# Press the key, but do not release the key. This is to allow for
# shift+click type behavior.
self.assertEqual(b'\x02\x00\x00\x00\x00\x00\x00\x00',
input_file.read())
def test_send_control_key_and_hid_keycode_to_hid_interface(self):
with tempfile.NamedTemporaryFile() as input_file:
keyboard.send_keystroke(
keyboard_path=input_file.name,
control_keys=keycodes.MODIFIER_LEFT_SHIFT,
hid_keycode=keycodes.KEYCODE_A,
)
input_file.seek(0)
# Press the key then release the key.
self.assertEqual(
b'\x02\x00\x04\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00', input_file.read())
def test_send_release_keys_to_hid_interface(self):
with tempfile.NamedTemporaryFile() as input_file:
keyboard.release_keys(keyboard_path=input_file.name)
self.assertEqual(b'\x00\x00\x00\x00\x00\x00\x00\x00',
input_file.read())
| 39.203704
| 77
| 0.612187
| 261
| 2,117
| 4.720307
| 0.214559
| 0.175325
| 0.211851
| 0.224026
| 0.82711
| 0.774351
| 0.774351
| 0.754058
| 0.754058
| 0.754058
| 0
| 0.067073
| 0.302787
| 2,117
| 53
| 78
| 39.943396
| 0.767615
| 0.076523
| 0
| 0.547619
| 0
| 0
| 0.098462
| 0.098462
| 0
| 0
| 0
| 0
| 0.095238
| 1
| 0.095238
| false
| 0
| 0.095238
| 0
| 0.214286
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f1c800105e4b0ed688fc4ee1efea355c56ada0d8
| 155
|
py
|
Python
|
deep_phospho/proteomics_utils/colors/__init__.py
|
weizhenFrank/DeepPhospho
|
e720b867528d92f9a1a5ec840484989af2b8eb63
|
[
"MIT"
] | 2
|
2021-11-25T01:06:18.000Z
|
2021-12-22T06:34:53.000Z
|
deep_phospho/proteomics_utils/colors/__init__.py
|
weizhenFrank/DeepPhospho
|
e720b867528d92f9a1a5ec840484989af2b8eb63
|
[
"MIT"
] | null | null | null |
deep_phospho/proteomics_utils/colors/__init__.py
|
weizhenFrank/DeepPhospho
|
e720b867528d92f9a1a5ec840484989af2b8eb63
|
[
"MIT"
] | 1
|
2021-04-26T03:00:48.000Z
|
2021-04-26T03:00:48.000Z
|
from . import precolors_grad
from .precolors_grad import PreColorDict
from . import precolors_single
from . import rgb_hsl
from .rgb_hex import hex_to_rgb
| 25.833333
| 40
| 0.83871
| 24
| 155
| 5.125
| 0.416667
| 0.243902
| 0.308943
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 155
| 5
| 41
| 31
| 0.911111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9e97c30f73e1f5ea6575498d6261b3af4a17067d
| 2,695
|
py
|
Python
|
tests/unit/threading_test.py
|
deckar01/ratelim
|
d9ee2ba149d4f75e60f2ab4651ccac18d54e7c24
|
[
"MIT"
] | 13
|
2020-03-24T23:57:14.000Z
|
2021-12-23T14:14:40.000Z
|
tests/unit/threading_test.py
|
deckar01/ratelim
|
d9ee2ba149d4f75e60f2ab4651ccac18d54e7c24
|
[
"MIT"
] | 8
|
2020-03-24T13:43:41.000Z
|
2021-11-19T21:21:53.000Z
|
tests/unit/threading_test.py
|
deckar01/ratelim
|
d9ee2ba149d4f75e60f2ab4651ccac18d54e7c24
|
[
"MIT"
] | 4
|
2020-05-24T20:08:48.000Z
|
2021-01-24T05:29:38.000Z
|
from threading import Thread
from queue import Queue
import unittest
import sqlite3
from ratelimit import limits, RateLimitException
@limits(calls=2, period=10)
def decorate_in_shared(q):
"""
Increment the counter at most twice every 10 seconds.
"""
q.put(1)
def decorate_in_thread(q):
"""
Increment the counter at most twice every 10 seconds.
"""
@limits(calls=2, period=10)
def run():
q.put(1)
run()
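# The two helpers differ in where the limits() decorator lives:
# decorate_in_shared shares one decorated function (one rate-limit window)
# across every thread, while decorate_in_thread builds a fresh decorated
# run() on each call, so each invocation would get its own counter unless
# the library shares state externally (the sqlite-backed teardown below
# suggests that it does).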
class TestPreThreading(unittest.TestCase):
def setup_method(self, _):
self.shared = {"count": 0}
self.queue = Queue()
self.threads = [
Thread(target=decorate_in_shared, args=(self.queue,)) for _ in range(2)
]
def teardown_method(self, _):
database = sqlite3.connect("file:ratelimit?mode=memory&cache=shared", uri=True)
database.execute("DELETE FROM main_limit")
database.commit()
database.close()
def test_increment(self):
self.threads[0].start()
self.threads[1].start()
self.threads[0].join()
self.threads[1].join()
self.assertEqual(self.queue.qsize(), 2)
def test_exception(self):
self.threads[0].start()
self.threads[1].start()
self.threads[0].join()
self.threads[1].join()
database = sqlite3.connect(
"file:ratelimit?mode=memory&cache=shared",
uri=True,
)
a = database.execute("SELECT time from main_limit").fetchall()
self.assertRaises(RateLimitException, decorate_in_shared, q=self.queue)
class TestPostThreading(unittest.TestCase):
def setup_method(self, _):
self.shared = {"count": 0}
self.queue = Queue()
self.threads = [
Thread(target=decorate_in_thread, args=(self.queue,)) for _ in range(2)
]
def teardown_method(self, _):
database = sqlite3.connect("file:ratelimit?mode=memory&cache=shared", uri=True)
database.execute("DELETE FROM main_limit")
database.commit()
database.close()
def test_increment(self):
self.threads[0].start()
self.threads[1].start()
self.threads[0].join()
self.threads[1].join()
self.assertEqual(self.queue.qsize(), 2)
def test_exception(self):
self.threads[0].start()
self.threads[1].start()
self.threads[0].join()
self.threads[1].join()
database = sqlite3.connect(
"file:ratelimit?mode=memory&cache=shared",
uri=True,
)
a = database.execute("SELECT time from main_limit").fetchall()
self.assertRaises(RateLimitException, decorate_in_thread, q=self.queue)
| 28.670213
| 87
| 0.616327
| 322
| 2,695
| 5.065217
| 0.217391
| 0.121398
| 0.05886
| 0.063765
| 0.84488
| 0.84488
| 0.816677
| 0.816677
| 0.816677
| 0.816677
| 0
| 0.019374
| 0.253061
| 2,695
| 93
| 88
| 28.978495
| 0.790859
| 0.038961
| 0
| 0.732394
| 0
| 0
| 0.103165
| 0.060961
| 0
| 0
| 0
| 0
| 0.056338
| 1
| 0.15493
| false
| 0
| 0.070423
| 0
| 0.253521
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7bc2d75deb50e4f144923e659909e41c0df69c96
| 271
|
py
|
Python
|
allennlp_models/structured_prediction/__init__.py
|
matt-peters/allennlp-models
|
cdd505ed539fdc2b82e4cc0a23eae4bfd3368e7e
|
[
"Apache-2.0"
] | 402
|
2020-03-11T22:58:35.000Z
|
2022-03-29T09:05:27.000Z
|
allennlp_models/structured_prediction/__init__.py
|
matt-peters/allennlp-models
|
cdd505ed539fdc2b82e4cc0a23eae4bfd3368e7e
|
[
"Apache-2.0"
] | 116
|
2020-03-11T01:26:57.000Z
|
2022-03-25T13:03:56.000Z
|
allennlp_models/structured_prediction/__init__.py
|
matt-peters/allennlp-models
|
cdd505ed539fdc2b82e4cc0a23eae4bfd3368e7e
|
[
"Apache-2.0"
] | 140
|
2020-03-11T00:51:35.000Z
|
2022-03-29T09:05:36.000Z
|
# flake8: noqa: F403
from allennlp_models.structured_prediction.predictors import *
from allennlp_models.structured_prediction.dataset_readers import *
from allennlp_models.structured_prediction.metrics import *
from allennlp_models.structured_prediction.models import *
| 45.166667
| 67
| 0.870849
| 32
| 271
| 7.09375
| 0.40625
| 0.211454
| 0.317181
| 0.493392
| 0.748899
| 0.581498
| 0
| 0
| 0
| 0
| 0
| 0.015936
| 0.073801
| 271
| 5
| 68
| 54.2
| 0.888446
| 0.066421
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
cdd609c5d04e5f14555c69eb8c5d321431b08db7
| 14,272
|
py
|
Python
|
pythologistTK/annotation.py
|
pilarOrtega/pythologistTK
|
fb970091b952dbf852ed456207c60dd07a24d7a5
|
[
"BSD-2-Clause"
] | null | null | null |
pythologistTK/annotation.py
|
pilarOrtega/pythologistTK
|
fb970091b952dbf852ed456207c60dd07a24d7a5
|
[
"BSD-2-Clause"
] | null | null | null |
pythologistTK/annotation.py
|
pilarOrtega/pythologistTK
|
fb970091b952dbf852ed456207c60dd07a24d7a5
|
[
"BSD-2-Clause"
] | 2
|
2020-06-24T13:26:04.000Z
|
2020-06-25T07:08:38.000Z
|
# coding: utf8
"""
Annotation classes
Original author: Arnaud Abreu
"""
from tkinter import *
from tkinter import ttk
from tkinter.filedialog import *
from PIL import ImageTk
from pythologistTK import view
class AnnotationTab:
def __init__(self, master, model):
self.master = master
self.model = model
self.isannotation = False
self.image = None
self.photoimage = None
# annotation pannel
self.annotationPannel = ttk.Frame(self.master, width=200)
self.annotationPannel.pack(side=LEFT, fill=Y)
# annotation labeled pannel (inside annotation pannel)
self.annotationSubPannel = ttk.LabelFrame(self.annotationPannel,
width=190,
text="Annotation Browser")
self.annotationSubPannel.pack(side=TOP, fill=BOTH, expand=YES)
self.maskProposal = ttk.Combobox(self.annotationSubPannel)
self.maskProposal.bind('<<ComboboxSelected>>', self.maskAnnotation)
self.maskProposal.pack(side=TOP, fill=X)
# scrollable annotation list (inside annotation labeled pannel)
self.scrollannotations = ttk.Scrollbar(self.annotationSubPannel)
self.annotationList = Listbox(self.annotationSubPannel, bg="gray25")
self.annotationList.bind("<<ListboxSelect>>", self.checkAnnotation)
self.scrollannotations.config(command=self.annotationList.yview)
self.annotationList.pack(side=LEFT, fill=BOTH, expand=YES)
# individual annotation pannel
self.individualPannel = ttk.Frame(self.master, width=600)
self.individualPannel.pack(side=LEFT, fill=BOTH, expand=YES)
# annotation rough description pannel
self.descriptionPannel = ttk.LabelFrame(self.individualPannel,
width=200,
text="Annotation Description")
self.descriptionPannel.pack(side=LEFT, fill=Y)
self.propertyList = Listbox(self.descriptionPannel, bg="gray25")
self.propertyList.pack(side=TOP, fill=BOTH, expand=YES)
# annotation thumbnail
self.patchPannel = ttk.Frame(self.individualPannel)
self.patchPannel.pack(side=LEFT, fill=BOTH, expand=YES)
# future "transform pannel" at the bottom
self.processPannel = ttk.LabelFrame(self.patchPannel,
height=400,
text="Processes")
self.processPannel.pack(side=BOTTOM, fill=X, expand=YES)
self.processButtonPannel = ttk.Frame(self.processPannel, height=50)
self.processButtonPannel.pack(side=BOTTOM, fill=X, expand=YES)
self.processButton = ttk.Button(self.processButtonPannel,
text="Process",
command=self.runProcess)
self.processButton.pack()
self.progressBar = ttk.Progressbar(self.processPannel)
self.progressBar.pack(side=BOTTOM, fill=X, expand=YES)
self.processList = Listbox(self.processPannel, bg="gray25")
self.processList.pack(side=TOP, fill=BOTH, expand=YES)
# viewer is a pretty bad idea or I'll have to modify it deeply
self.patchView = view.ResizableCanvas(self.patchPannel,
bg="black",
highlightthickness=0)
self.patchView.pack(side=TOP, fill=BOTH, expand=YES)
def initAnnot(self):
self.annotationList.delete(0, END)
namesNcolors = self.model.annotationNames()
for name in namesNcolors:
self.annotationList.insert(END, name["name"])
self.annotationList.itemconfig(END, foreground=name["color"])
properties = self.model.annotationUniqueProperties()
properties.append("All")
self.maskProposal["values"] = properties
self.processList.delete(0, END)
processes = self.model.findProcesses()
for p in processes:
self.processList.insert(END, p)
def checkAnnotation(self, evt):
if self.isannotation:
w = evt.widget
index = int(w.curselection()[0])
color = self.annotationList.itemcget(index, "foreground")
value = w.get(index)
detail = self.model.detailedAnnotation(value)
self.propertyList.delete(0, END)
for d in detail:
self.propertyList.insert(END, d)
bbx, self.image = self.model.imageAnnotation(value)
self.image.putalpha(255)
self.photoimage = ImageTk.PhotoImage(self.image)
self.patchView.delete("all")
self.patchView.create_image(0,
0,
anchor=NW,
image=self.photoimage,
tags="image")
self.patchView.create_rectangle(bbx[0], bbx[1], bbx[2], bbx[3], outline=color)
self.patchView.pack()
def maskAnnotation(self, evt):
val = self.maskProposal.get()
if val == "All":
self.initAnnot()
else:
namesNcolors = self.model.annotationNamesByPropertyVal(val)
self.annotationList.delete(0, END)
for name in namesNcolors:
self.annotationList.insert(END, name["name"])
self.annotationList.itemconfig(END, foreground=name["color"])
def runProcess(self):
if self.processList.get(ACTIVE):
self.model.runProcess(self.processList.get(ACTIVE),
self.progressBar)
self.initAnnot()
class AnnotationTabV2:
def __init__(self, master, model):
self.master = master
self.model = model
self.isfileannotation = False
self.isannotation = False
self.image = None
self.photoimage = None
# file annotation panel
self.fileAnnotationPannel = ttk.Frame(self.master, width=200)
self.fileAnnotationPannel.pack(side=LEFT, fill=Y)
# file annotation labeled pannel (inside annotation pannel)
self.fileAnnotationSubPannel = ttk.LabelFrame(self.fileAnnotationPannel,
width=190,
text="Annotation Files")
self.fileAnnotationSubPannel.pack(side=TOP, fill=BOTH, expand=YES)
# add new annotation file button
self.buttonAddAnnotationFile = ttk.Button(self.fileAnnotationSubPannel,
text="Add File",
command=self.addAnnotationFile)
self.buttonAddAnnotationFile.pack()
# add remove annotation file button
self.buttonRemoveAnnotationFile = ttk.Button(self.fileAnnotationSubPannel,
text="Remove File",
command=self.removeAnnotationFile)
self.buttonRemoveAnnotationFile.pack()
# scrollable file annotation list (inside file annotation labeled pannel)
self.scrollannotationfiles = ttk.Scrollbar(self.fileAnnotationSubPannel)
self.annotationFileList = Listbox(self.fileAnnotationSubPannel, bg="gray25")
self.annotationFileList.bind("<<ListboxSelect>>", self.checkAnnotationFile)
self.scrollannotationfiles.config(command=self.annotationFileList.yview)
self.annotationFileList.pack(side=LEFT, fill=BOTH, expand=YES)
# annotation pannel
self.annotationPannel = ttk.Frame(self.master, width=200)
self.annotationPannel.pack(side=LEFT, fill=Y)
# annotation labeled pannel (inside annotation pannel)
self.annotationSubPannel = ttk.LabelFrame(self.annotationPannel,
width=190,
text="Annotation Browser")
self.annotationSubPannel.pack(side=TOP, fill=BOTH, expand=YES)
self.maskProposal = ttk.Combobox(self.annotationSubPannel)
self.maskProposal.bind('<<ComboboxSelected>>', self.maskAnnotation)
self.maskProposal.pack(side=TOP, fill=X)
# scrollable annotation list (inside annotation labeled pannel)
self.scrollannotations = ttk.Scrollbar(self.annotationSubPannel)
self.annotationList = Listbox(self.annotationSubPannel, bg="gray25")
self.annotationList.bind("<<ListboxSelect>>", self.checkAnnotation)
self.scrollannotations.config(command=self.annotationList.yview)
self.annotationList.pack(side=LEFT, fill=BOTH, expand=YES)
# individual annotation pannel
self.individualPannel = ttk.Frame(self.master, width=600)
self.individualPannel.pack(side=LEFT, fill=BOTH, expand=YES)
# annotation rough description pannel
self.descriptionPannel = ttk.LabelFrame(self.individualPannel,
width=200,
text="Annotation Description")
self.descriptionPannel.pack(side=LEFT, fill=Y)
self.propertyList = Listbox(self.descriptionPannel, bg="gray25")
self.propertyList.pack(side=TOP, fill=BOTH, expand=YES)
# annotation thumbnail
self.patchPannel = ttk.Frame(self.individualPannel)
self.patchPannel.pack(side=LEFT, fill=BOTH, expand=YES)
# future "transform pannel" at the bottom
self.processPannel = ttk.LabelFrame(self.patchPannel,
height=400,
text="Processes")
self.processPannel.pack(side=BOTTOM, fill=X, expand=YES)
self.processButtonPannel = ttk.Frame(self.processPannel, height=50)
self.processButtonPannel.pack(side=BOTTOM, fill=X, expand=YES)
self.processButton = ttk.Button(self.processButtonPannel,
text="Process",
command=self.runProcess)
self.processButton.pack()
self.progressBar = ttk.Progressbar(self.processPannel)
self.progressBar.pack(side=BOTTOM, fill=X, expand=YES)
self.processList = Listbox(self.processPannel, bg="gray25")
self.processList.pack(side=TOP, fill=BOTH, expand=YES)
# viewer is a pretty bad idea or I'll have to modify it deeply
self.patchView = view.ResizableCanvas(self.patchPannel,
bg="black",
highlightthickness=0)
self.patchView.pack(side=TOP, fill=BOTH, expand=YES)
def initAnnot(self):
self.annotationList.delete(0, END)
namesNcolors = self.model.annotationNames()
for name in namesNcolors:
self.annotationList.insert(END, name["name"])
self.annotationList.itemconfig(END, foreground=name["color"])
properties = self.model.annotationUniqueProperties()
properties.append("All")
self.maskProposal["values"] = properties
self.processList.delete(0, END)
processes = self.model.findProcesses()
for p in processes:
self.processList.insert(END, p)
def checkAnnotationFile(self, evt):
self.propertyList.delete(0, END)
w = evt.widget
index = int(w.curselection()[0])
filename = w.get(index)
if filename:
self.model.open_annotation_files(filename)
self.isfileannotation = True
if '.annot' in filename:
self.initAnnot()
def addAnnotationFile(self):
filename = askopenfilename(title='open annotation file',
filetypes=[('annot files', '.annot'),
('tif files', '.tif'),
('tiff files', '.tiff'),
('png files', '.png')])
if filename:
self.model.open_annotation_files(filename)
self.annotationFileList.insert(END, filename)
self.isfileannotation = True
def removeAnnotationFile(self):
index = int(self.annotationFileList.curselection()[0])
self.propertyList.delete(0, END)
self.annotationList.delete(0, END)
self.annotationFileList.delete(index)
def checkAnnotation(self, evt):
if self.isannotation:
w = evt.widget
index = int(w.curselection()[0])
color = self.annotationList.itemcget(index, "foreground")
value = w.get(index)
detail = self.model.detailedAnnotation(value)
self.propertyList.delete(0, END)
for d in detail:
self.propertyList.insert(END, d)
bbx, self.image = self.model.imageAnnotation(value)
self.image.putalpha(255)
self.photoimage = ImageTk.PhotoImage(self.image)
self.patchView.delete("all")
self.patchView.create_image(0,
0,
anchor=NW,
image=self.photoimage,
tags="image")
self.patchView.create_rectangle(bbx[0], bbx[1], bbx[2], bbx[3], outline=color)
self.patchView.pack()
def maskAnnotation(self, evt):
val = self.maskProposal.get()
if val == "All":
self.initAnnot()
else:
namesNcolors = self.model.annotationNamesByPropertyVal(val)
self.annotationList.delete(0, END)
for name in namesNcolors:
self.annotationList.insert(END, name["name"])
self.annotationList.itemconfig(END, foreground=name["color"])
def runProcess(self):
if self.processList.get(ACTIVE):
self.model.runProcess(self.processList.get(ACTIVE),
self.progressBar)
self.initAnnot()
| 45.022082
| 90
| 0.586183
| 1,326
| 14,272
| 6.297134
| 0.143288
| 0.027784
| 0.026826
| 0.032575
| 0.83497
| 0.813772
| 0.813772
| 0.800958
| 0.793653
| 0.769701
| 0
| 0.009359
| 0.318736
| 14,272
| 316
| 91
| 45.164557
| 0.849429
| 0.064392
| 0
| 0.841463
| 0
| 0
| 0.035275
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.052846
| false
| 0
| 0.020325
| 0
| 0.081301
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a81a387c0f8620b9bf7d62ac64648e16758c0794
| 24,129
|
py
|
Python
|
sdk/python/pulumi_azure/network/virtual_hub_route_table_route.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 109
|
2018-06-18T00:19:44.000Z
|
2022-02-20T05:32:57.000Z
|
sdk/python/pulumi_azure/network/virtual_hub_route_table_route.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 663
|
2018-06-18T21:08:46.000Z
|
2022-03-31T20:10:11.000Z
|
sdk/python/pulumi_azure/network/virtual_hub_route_table_route.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 41
|
2018-07-19T22:37:38.000Z
|
2022-03-14T10:56:26.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['VirtualHubRouteTableRouteInitArgs', 'VirtualHubRouteTableRoute']
@pulumi.input_type
class VirtualHubRouteTableRouteInitArgs:
def __init__(__self__, *,
destinations: pulumi.Input[Sequence[pulumi.Input[str]]],
destinations_type: pulumi.Input[str],
next_hop: pulumi.Input[str],
route_table_id: pulumi.Input[str],
name: Optional[pulumi.Input[str]] = None,
next_hop_type: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a VirtualHubRouteTableRoute resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] destinations: A list of destination addresses for this route.
:param pulumi.Input[str] destinations_type: The type of destinations. Possible values are `CIDR`, `ResourceId` and `Service`.
:param pulumi.Input[str] next_hop: The next hop's resource ID.
:param pulumi.Input[str] route_table_id: The ID of the Virtual Hub Route Table to link this route to. Changing this forces a new resource to be created.
:param pulumi.Input[str] name: The name which should be used for this route. Changing this forces a new resource to be created.
:param pulumi.Input[str] next_hop_type: The type of next hop. Currently the only possible value is `ResourceId`. Defaults to `ResourceId`.
"""
pulumi.set(__self__, "destinations", destinations)
pulumi.set(__self__, "destinations_type", destinations_type)
pulumi.set(__self__, "next_hop", next_hop)
pulumi.set(__self__, "route_table_id", route_table_id)
if name is not None:
pulumi.set(__self__, "name", name)
if next_hop_type is not None:
pulumi.set(__self__, "next_hop_type", next_hop_type)
@property
@pulumi.getter
def destinations(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
"""
A list of destination addresses for this route.
"""
return pulumi.get(self, "destinations")
@destinations.setter
def destinations(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
pulumi.set(self, "destinations", value)
@property
@pulumi.getter(name="destinationsType")
def destinations_type(self) -> pulumi.Input[str]:
"""
The type of destinations. Possible values are `CIDR`, `ResourceId` and `Service`.
"""
return pulumi.get(self, "destinations_type")
@destinations_type.setter
def destinations_type(self, value: pulumi.Input[str]):
pulumi.set(self, "destinations_type", value)
@property
@pulumi.getter(name="nextHop")
def next_hop(self) -> pulumi.Input[str]:
"""
The next hop's resource ID.
"""
return pulumi.get(self, "next_hop")
@next_hop.setter
def next_hop(self, value: pulumi.Input[str]):
pulumi.set(self, "next_hop", value)
@property
@pulumi.getter(name="routeTableId")
def route_table_id(self) -> pulumi.Input[str]:
"""
The ID of the Virtual Hub Route Table to link this route to. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "route_table_id")
@route_table_id.setter
def route_table_id(self, value: pulumi.Input[str]):
pulumi.set(self, "route_table_id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name which should be used for this route. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="nextHopType")
def next_hop_type(self) -> Optional[pulumi.Input[str]]:
"""
The type of next hop. Currently the only possible value is `ResourceId`. Defaults to `ResourceId`.
"""
return pulumi.get(self, "next_hop_type")
@next_hop_type.setter
def next_hop_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "next_hop_type", value)
@pulumi.input_type
class _VirtualHubRouteTableRouteState:
def __init__(__self__, *,
destinations: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
destinations_type: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
next_hop: Optional[pulumi.Input[str]] = None,
next_hop_type: Optional[pulumi.Input[str]] = None,
route_table_id: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering VirtualHubRouteTableRoute resources.
:param pulumi.Input[Sequence[pulumi.Input[str]]] destinations: A list of destination addresses for this route.
:param pulumi.Input[str] destinations_type: The type of destinations. Possible values are `CIDR`, `ResourceId` and `Service`.
:param pulumi.Input[str] name: The name which should be used for this route. Changing this forces a new resource to be created.
:param pulumi.Input[str] next_hop: The next hop's resource ID.
:param pulumi.Input[str] next_hop_type: The type of next hop. Currently the only possible value is `ResourceId`. Defaults to `ResourceId`.
:param pulumi.Input[str] route_table_id: The ID of the Virtual Hub Route Table to link this route to. Changing this forces a new resource to be created.
"""
if destinations is not None:
pulumi.set(__self__, "destinations", destinations)
if destinations_type is not None:
pulumi.set(__self__, "destinations_type", destinations_type)
if name is not None:
pulumi.set(__self__, "name", name)
if next_hop is not None:
pulumi.set(__self__, "next_hop", next_hop)
if next_hop_type is not None:
pulumi.set(__self__, "next_hop_type", next_hop_type)
if route_table_id is not None:
pulumi.set(__self__, "route_table_id", route_table_id)
@property
@pulumi.getter
def destinations(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of destination addresses for this route.
"""
return pulumi.get(self, "destinations")
@destinations.setter
def destinations(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "destinations", value)
@property
@pulumi.getter(name="destinationsType")
def destinations_type(self) -> Optional[pulumi.Input[str]]:
"""
The type of destinations. Possible values are `CIDR`, `ResourceId` and `Service`.
"""
return pulumi.get(self, "destinations_type")
@destinations_type.setter
def destinations_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "destinations_type", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name which should be used for this route. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="nextHop")
def next_hop(self) -> Optional[pulumi.Input[str]]:
"""
The next hop's resource ID.
"""
return pulumi.get(self, "next_hop")
@next_hop.setter
def next_hop(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "next_hop", value)
@property
@pulumi.getter(name="nextHopType")
def next_hop_type(self) -> Optional[pulumi.Input[str]]:
"""
The type of next hop. Currently the only possible value is `ResourceId`. Defaults to `ResourceId`.
"""
return pulumi.get(self, "next_hop_type")
@next_hop_type.setter
def next_hop_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "next_hop_type", value)
@property
@pulumi.getter(name="routeTableId")
def route_table_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the Virtual Hub Route Table to link this route to. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "route_table_id")
@route_table_id.setter
def route_table_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "route_table_id", value)
class VirtualHubRouteTableRoute(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
destinations: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
destinations_type: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
next_hop: Optional[pulumi.Input[str]] = None,
next_hop_type: Optional[pulumi.Input[str]] = None,
route_table_id: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Manages a Route in a Virtual Hub Route Table.
> **Note:** Route table routes can be managed with this resource, or in-line with the virtual_hub_route_table resource. Using both is not supported.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
example_virtual_network = azure.network.VirtualNetwork("exampleVirtualNetwork",
address_spaces=["10.5.0.0/16"],
location=example_resource_group.location,
resource_group_name=example_resource_group.name)
example_network_security_group = azure.network.NetworkSecurityGroup("exampleNetworkSecurityGroup",
location=example_resource_group.location,
resource_group_name=example_resource_group.name)
example_subnet = azure.network.Subnet("exampleSubnet",
resource_group_name=example_resource_group.name,
virtual_network_name=example_virtual_network.name,
address_prefixes=["10.5.1.0/24"])
example_subnet_network_security_group_association = azure.network.SubnetNetworkSecurityGroupAssociation("exampleSubnetNetworkSecurityGroupAssociation",
subnet_id=example_subnet.id,
network_security_group_id=example_network_security_group.id)
example_virtual_wan = azure.network.VirtualWan("exampleVirtualWan",
resource_group_name=example_resource_group.name,
location=example_resource_group.location)
example_virtual_hub = azure.network.VirtualHub("exampleVirtualHub",
resource_group_name=example_resource_group.name,
location=example_resource_group.location,
virtual_wan_id=example_virtual_wan.id,
address_prefix="10.0.2.0/24")
example_virtual_hub_route_table = azure.network.VirtualHubRouteTable("exampleVirtualHubRouteTable",
virtual_hub_id=example_virtual_hub.id,
labels=["label1"])
example_virtual_hub_connection = azure.network.VirtualHubConnection("exampleVirtualHubConnection",
virtual_hub_id=example_virtual_hub.id,
remote_virtual_network_id=example_virtual_network.id,
routing=azure.network.VirtualHubConnectionRoutingArgs(
associated_route_table_id=example_virtual_hub_route_table.id,
))
example_virtual_hub_route_table_route = azure.network.VirtualHubRouteTableRoute("exampleVirtualHubRouteTableRoute",
route_table_id=example_virtual_hub_route_table.id,
destinations_type="CIDR",
destinations=["10.0.0.0/16"],
next_hop_type="ResourceId",
next_hop=example_virtual_hub_connection.id)
```
## Import
Virtual Hub Route Table Routes can be imported using `<Route Table Resource Id>/routes/<Route Name>`, e.g.
```sh
$ pulumi import azure:network/virtualHubRouteTableRoute:VirtualHubRouteTableRoute example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Network/virtualHubs/virtualHub1/hubRouteTables/routeTable1/routes/routeName
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] destinations: A list of destination addresses for this route.
:param pulumi.Input[str] destinations_type: The type of destinations. Possible values are `CIDR`, `ResourceId` and `Service`.
:param pulumi.Input[str] name: The name which should be used for this route. Changing this forces a new resource to be created.
:param pulumi.Input[str] next_hop: The next hop's resource ID.
:param pulumi.Input[str] next_hop_type: The type of next hop. Currently the only possible value is `ResourceId`. Defaults to `ResourceId`.
:param pulumi.Input[str] route_table_id: The ID of the Virtual Hub Route Table to link this route to. Changing this forces a new resource to be created.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: VirtualHubRouteTableRouteInitArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Manages a Route in a Virtual Hub Route Table.
> **Note:** Route table routes can be managed with this resource, or in-line with the virtual_hub_route_table resource. Using both is not supported.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
example_virtual_network = azure.network.VirtualNetwork("exampleVirtualNetwork",
address_spaces=["10.5.0.0/16"],
location=example_resource_group.location,
resource_group_name=example_resource_group.name)
example_network_security_group = azure.network.NetworkSecurityGroup("exampleNetworkSecurityGroup",
location=example_resource_group.location,
resource_group_name=example_resource_group.name)
example_subnet = azure.network.Subnet("exampleSubnet",
resource_group_name=example_resource_group.name,
virtual_network_name=example_virtual_network.name,
address_prefixes=["10.5.1.0/24"])
example_subnet_network_security_group_association = azure.network.SubnetNetworkSecurityGroupAssociation("exampleSubnetNetworkSecurityGroupAssociation",
subnet_id=example_subnet.id,
network_security_group_id=example_network_security_group.id)
example_virtual_wan = azure.network.VirtualWan("exampleVirtualWan",
resource_group_name=example_resource_group.name,
location=example_resource_group.location)
example_virtual_hub = azure.network.VirtualHub("exampleVirtualHub",
resource_group_name=example_resource_group.name,
location=example_resource_group.location,
virtual_wan_id=example_virtual_wan.id,
address_prefix="10.0.2.0/24")
example_virtual_hub_route_table = azure.network.VirtualHubRouteTable("exampleVirtualHubRouteTable",
virtual_hub_id=example_virtual_hub.id,
labels=["label1"])
example_virtual_hub_connection = azure.network.VirtualHubConnection("exampleVirtualHubConnection",
virtual_hub_id=example_virtual_hub.id,
remote_virtual_network_id=example_virtual_network.id,
routing=azure.network.VirtualHubConnectionRoutingArgs(
associated_route_table_id=example_virtual_hub_route_table.id,
))
example_virtual_hub_route_table_route = azure.network.VirtualHubRouteTableRoute("exampleVirtualHubRouteTableRoute",
route_table_id=example_virtual_hub_route_table.id,
destinations_type="CIDR",
destinations=["10.0.0.0/16"],
next_hop_type="ResourceId",
next_hop=example_virtual_hub_connection.id)
```
## Import
Virtual Hub Route Table Routes can be imported using `<Route Table Resource Id>/routes/<Route Name>`, e.g.
```sh
$ pulumi import azure:network/virtualHubRouteTableRoute:VirtualHubRouteTableRoute example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Network/virtualHubs/virtualHub1/hubRouteTables/routeTable1/routes/routeName
```
:param str resource_name: The name of the resource.
:param VirtualHubRouteTableRouteInitArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(VirtualHubRouteTableRouteInitArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
destinations: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
destinations_type: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
next_hop: Optional[pulumi.Input[str]] = None,
next_hop_type: Optional[pulumi.Input[str]] = None,
route_table_id: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = VirtualHubRouteTableRouteInitArgs.__new__(VirtualHubRouteTableRouteInitArgs)
if destinations is None and not opts.urn:
raise TypeError("Missing required property 'destinations'")
__props__.__dict__["destinations"] = destinations
if destinations_type is None and not opts.urn:
raise TypeError("Missing required property 'destinations_type'")
__props__.__dict__["destinations_type"] = destinations_type
__props__.__dict__["name"] = name
if next_hop is None and not opts.urn:
raise TypeError("Missing required property 'next_hop'")
__props__.__dict__["next_hop"] = next_hop
__props__.__dict__["next_hop_type"] = next_hop_type
if route_table_id is None and not opts.urn:
raise TypeError("Missing required property 'route_table_id'")
__props__.__dict__["route_table_id"] = route_table_id
super(VirtualHubRouteTableRoute, __self__).__init__(
'azure:network/virtualHubRouteTableRoute:VirtualHubRouteTableRoute',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
destinations: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
destinations_type: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
next_hop: Optional[pulumi.Input[str]] = None,
next_hop_type: Optional[pulumi.Input[str]] = None,
route_table_id: Optional[pulumi.Input[str]] = None) -> 'VirtualHubRouteTableRoute':
"""
Get an existing VirtualHubRouteTableRoute resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] destinations: A list of destination addresses for this route.
:param pulumi.Input[str] destinations_type: The type of destinations. Possible values are `CIDR`, `ResourceId` and `Service`.
:param pulumi.Input[str] name: The name which should be used for this route. Changing this forces a new resource to be created.
:param pulumi.Input[str] next_hop: The next hop's resource ID.
:param pulumi.Input[str] next_hop_type: The type of next hop. Currently the only possible value is `ResourceId`. Defaults to `ResourceId`.
:param pulumi.Input[str] route_table_id: The ID of the Virtual Hub Route Table to link this route to. Changing this forces a new resource to be created.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _VirtualHubRouteTableRouteState.__new__(_VirtualHubRouteTableRouteState)
__props__.__dict__["destinations"] = destinations
__props__.__dict__["destinations_type"] = destinations_type
__props__.__dict__["name"] = name
__props__.__dict__["next_hop"] = next_hop
__props__.__dict__["next_hop_type"] = next_hop_type
__props__.__dict__["route_table_id"] = route_table_id
return VirtualHubRouteTableRoute(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def destinations(self) -> pulumi.Output[Sequence[str]]:
"""
A list of destination addresses for this route.
"""
return pulumi.get(self, "destinations")
@property
@pulumi.getter(name="destinationsType")
def destinations_type(self) -> pulumi.Output[str]:
"""
The type of destinations. Possible values are `CIDR`, `ResourceId` and `Service`.
"""
return pulumi.get(self, "destinations_type")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name which should be used for this route. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="nextHop")
def next_hop(self) -> pulumi.Output[str]:
"""
The next hop's resource ID.
"""
return pulumi.get(self, "next_hop")
@property
@pulumi.getter(name="nextHopType")
def next_hop_type(self) -> pulumi.Output[Optional[str]]:
"""
The type of next hop. Currently the only possible value is `ResourceId`. Defaults to `ResourceId`.
"""
return pulumi.get(self, "next_hop_type")
@property
@pulumi.getter(name="routeTableId")
def route_table_id(self) -> pulumi.Output[str]:
"""
The ID of the Virtual Hub Route Table to link this route to. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "route_table_id")
| 48.354709
| 268
| 0.673588
| 2,821
| 24,129
| 5.511521
| 0.080468
| 0.067211
| 0.072035
| 0.050939
| 0.8686
| 0.85548
| 0.842423
| 0.817018
| 0.807371
| 0.801582
| 0
| 0.006961
| 0.23192
| 24,129
| 498
| 269
| 48.451807
| 0.831975
| 0.456422
| 0
| 0.659574
| 1
| 0
| 0.105592
| 0.012831
| 0
| 0
| 0
| 0
| 0
| 1
| 0.157447
| false
| 0.004255
| 0.021277
| 0
| 0.27234
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a821b63268477b19423efae90984eebbb99e26d9
| 40,458
|
py
|
Python
|
spinnaker_swagger_client/api/v_2_canary_controller_api.py
|
coveooss/spinnaker_python_client
|
6f5ae436798cb4985ada65cd8169fcc9494d048f
|
[
"Apache-2.0"
] | null | null | null |
spinnaker_swagger_client/api/v_2_canary_controller_api.py
|
coveooss/spinnaker_python_client
|
6f5ae436798cb4985ada65cd8169fcc9494d048f
|
[
"Apache-2.0"
] | null | null | null |
spinnaker_swagger_client/api/v_2_canary_controller_api.py
|
coveooss/spinnaker_python_client
|
6f5ae436798cb4985ada65cd8169fcc9494d048f
|
[
"Apache-2.0"
] | 2
|
2019-10-17T07:49:21.000Z
|
2021-08-10T23:12:41.000Z
|
# coding: utf-8
"""
Spinnaker API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from spinnaker_swagger_client.api_client import ApiClient
class V2CanaryControllerApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def get_canary_result_using_get(self, canary_config_id, canary_execution_id, **kwargs): # noqa: E501
"""(DEPRECATED) Retrieve a canary result # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_canary_result_using_get(canary_config_id, canary_execution_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str canary_config_id: canaryConfigId (required)
:param str canary_execution_id: canaryExecutionId (required)
:param str storage_account_name: storageAccountName
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_canary_result_using_get_with_http_info(canary_config_id, canary_execution_id, **kwargs) # noqa: E501
else:
(data) = self.get_canary_result_using_get_with_http_info(canary_config_id, canary_execution_id, **kwargs) # noqa: E501
return data
def get_canary_result_using_get_with_http_info(self, canary_config_id, canary_execution_id, **kwargs): # noqa: E501
"""(DEPRECATED) Retrieve a canary result # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_canary_result_using_get_with_http_info(canary_config_id, canary_execution_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str canary_config_id: canaryConfigId (required)
:param str canary_execution_id: canaryExecutionId (required)
:param str storage_account_name: storageAccountName
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['canary_config_id', 'canary_execution_id', 'storage_account_name'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_canary_result_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'canary_config_id' is set
if ('canary_config_id' not in params or
params['canary_config_id'] is None):
raise ValueError("Missing the required parameter `canary_config_id` when calling `get_canary_result_using_get`") # noqa: E501
# verify the required parameter 'canary_execution_id' is set
if ('canary_execution_id' not in params or
params['canary_execution_id'] is None):
raise ValueError("Missing the required parameter `canary_execution_id` when calling `get_canary_result_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'canary_config_id' in params:
path_params['canaryConfigId'] = params['canary_config_id'] # noqa: E501
if 'canary_execution_id' in params:
path_params['canaryExecutionId'] = params['canary_execution_id'] # noqa: E501
query_params = []
if 'storage_account_name' in params:
query_params.append(('storageAccountName', params['storage_account_name'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v2/canaries/canary/{canaryConfigId}/{canaryExecutionId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_canary_result_using_get1(self, canary_execution_id, **kwargs): # noqa: E501
"""Retrieve a canary result # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_canary_result_using_get1(canary_execution_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str canary_execution_id: canaryExecutionId (required)
:param str storage_account_name: storageAccountName
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_canary_result_using_get1_with_http_info(canary_execution_id, **kwargs) # noqa: E501
else:
(data) = self.get_canary_result_using_get1_with_http_info(canary_execution_id, **kwargs) # noqa: E501
return data
def get_canary_result_using_get1_with_http_info(self, canary_execution_id, **kwargs): # noqa: E501
"""Retrieve a canary result # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_canary_result_using_get1_with_http_info(canary_execution_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str canary_execution_id: canaryExecutionId (required)
:param str storage_account_name: storageAccountName
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['canary_execution_id', 'storage_account_name'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_canary_result_using_get1" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'canary_execution_id' is set
if ('canary_execution_id' not in params or
params['canary_execution_id'] is None):
raise ValueError("Missing the required parameter `canary_execution_id` when calling `get_canary_result_using_get1`") # noqa: E501
collection_formats = {}
path_params = {}
if 'canary_execution_id' in params:
path_params['canaryExecutionId'] = params['canary_execution_id'] # noqa: E501
query_params = []
if 'storage_account_name' in params:
query_params.append(('storageAccountName', params['storage_account_name'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v2/canaries/canary/{canaryExecutionId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_canary_results_by_application_using_get(self, application, limit, **kwargs): # noqa: E501
"""Retrieve a list of an application's canary results # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_canary_results_by_application_using_get(application, limit, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str application: application (required)
:param int limit: limit (required)
:param int page: page
:param str statuses: Comma-separated list of statuses, e.g.: RUNNING, SUCCEEDED, TERMINAL
:param str storage_account_name: storageAccountName
:return: list[object]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_canary_results_by_application_using_get_with_http_info(application, limit, **kwargs) # noqa: E501
else:
(data) = self.get_canary_results_by_application_using_get_with_http_info(application, limit, **kwargs) # noqa: E501
return data
def get_canary_results_by_application_using_get_with_http_info(self, application, limit, **kwargs): # noqa: E501
"""Retrieve a list of an application's canary results # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_canary_results_by_application_using_get_with_http_info(application, limit, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str application: application (required)
:param int limit: limit (required)
:param int page: page
:param str statuses: Comma-separated list of statuses, e.g.: RUNNING, SUCCEEDED, TERMINAL
:param str storage_account_name: storageAccountName
:return: list[object]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['application', 'limit', 'page', 'statuses', 'storage_account_name'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_canary_results_by_application_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'application' is set
if ('application' not in params or
params['application'] is None):
raise ValueError("Missing the required parameter `application` when calling `get_canary_results_by_application_using_get`") # noqa: E501
# verify the required parameter 'limit' is set
if ('limit' not in params or
params['limit'] is None):
raise ValueError("Missing the required parameter `limit` when calling `get_canary_results_by_application_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'application' in params:
path_params['application'] = params['application'] # noqa: E501
query_params = []
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'statuses' in params:
query_params.append(('statuses', params['statuses'])) # noqa: E501
if 'storage_account_name' in params:
query_params.append(('storageAccountName', params['storage_account_name'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v2/canaries/{application}/executions', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[object]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_metric_set_pair_list_using_get(self, metric_set_pair_list_id, **kwargs): # noqa: E501
"""Retrieve a metric set pair list # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_metric_set_pair_list_using_get(metric_set_pair_list_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str metric_set_pair_list_id: metricSetPairListId (required)
:param str storage_account_name: storageAccountName
:return: list[object]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_metric_set_pair_list_using_get_with_http_info(metric_set_pair_list_id, **kwargs) # noqa: E501
else:
(data) = self.get_metric_set_pair_list_using_get_with_http_info(metric_set_pair_list_id, **kwargs) # noqa: E501
return data
def get_metric_set_pair_list_using_get_with_http_info(self, metric_set_pair_list_id, **kwargs): # noqa: E501
"""Retrieve a metric set pair list # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_metric_set_pair_list_using_get_with_http_info(metric_set_pair_list_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str metric_set_pair_list_id: metricSetPairListId (required)
:param str storage_account_name: storageAccountName
:return: list[object]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['metric_set_pair_list_id', 'storage_account_name'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_metric_set_pair_list_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'metric_set_pair_list_id' is set
if ('metric_set_pair_list_id' not in params or
params['metric_set_pair_list_id'] is None):
raise ValueError("Missing the required parameter `metric_set_pair_list_id` when calling `get_metric_set_pair_list_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'metric_set_pair_list_id' in params:
path_params['metricSetPairListId'] = params['metric_set_pair_list_id'] # noqa: E501
query_params = []
if 'storage_account_name' in params:
query_params.append(('storageAccountName', params['storage_account_name'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v2/canaries/metricSetPairList/{metricSetPairListId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[object]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def initiate_canary_using_post(self, canary_config_id, execution_request, **kwargs): # noqa: E501
"""Start a canary execution # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.initiate_canary_using_post(canary_config_id, execution_request, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str canary_config_id: canaryConfigId (required)
:param object execution_request: executionRequest (required)
:param str application: application
:param str configuration_account_name: configurationAccountName
:param str metrics_account_name: metricsAccountName
:param str parent_pipeline_execution_id: parentPipelineExecutionId
:param str storage_account_name: storageAccountName
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.initiate_canary_using_post_with_http_info(canary_config_id, execution_request, **kwargs) # noqa: E501
else:
(data) = self.initiate_canary_using_post_with_http_info(canary_config_id, execution_request, **kwargs) # noqa: E501
return data
def initiate_canary_using_post_with_http_info(self, canary_config_id, execution_request, **kwargs): # noqa: E501
"""Start a canary execution # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.initiate_canary_using_post_with_http_info(canary_config_id, execution_request, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str canary_config_id: canaryConfigId (required)
:param object execution_request: executionRequest (required)
:param str application: application
:param str configuration_account_name: configurationAccountName
:param str metrics_account_name: metricsAccountName
:param str parent_pipeline_execution_id: parentPipelineExecutionId
:param str storage_account_name: storageAccountName
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['canary_config_id', 'execution_request', 'application', 'configuration_account_name', 'metrics_account_name', 'parent_pipeline_execution_id', 'storage_account_name'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method initiate_canary_using_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'canary_config_id' is set
if ('canary_config_id' not in params or
params['canary_config_id'] is None):
raise ValueError("Missing the required parameter `canary_config_id` when calling `initiate_canary_using_post`") # noqa: E501
# verify the required parameter 'execution_request' is set
if ('execution_request' not in params or
params['execution_request'] is None):
raise ValueError("Missing the required parameter `execution_request` when calling `initiate_canary_using_post`") # noqa: E501
collection_formats = {}
path_params = {}
if 'canary_config_id' in params:
path_params['canaryConfigId'] = params['canary_config_id'] # noqa: E501
query_params = []
if 'application' in params:
query_params.append(('application', params['application'])) # noqa: E501
if 'configuration_account_name' in params:
query_params.append(('configurationAccountName', params['configuration_account_name'])) # noqa: E501
if 'metrics_account_name' in params:
query_params.append(('metricsAccountName', params['metrics_account_name'])) # noqa: E501
if 'parent_pipeline_execution_id' in params:
query_params.append(('parentPipelineExecutionId', params['parent_pipeline_execution_id'])) # noqa: E501
if 'storage_account_name' in params:
query_params.append(('storageAccountName', params['storage_account_name'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'execution_request' in params:
body_params = params['execution_request']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v2/canaries/canary/{canaryConfigId}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def initiate_canary_with_config_using_post(self, adhoc_execution_request, **kwargs): # noqa: E501
"""Start a canary execution with the supplied canary config # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.initiate_canary_with_config_using_post(adhoc_execution_request, async_req=True)
>>> result = thread.get()
:param async_req bool
:param object adhoc_execution_request: adhocExecutionRequest (required)
:param str application: application
:param str metrics_account_name: metricsAccountName
:param str parent_pipeline_execution_id: parentPipelineExecutionId
:param str storage_account_name: storageAccountName
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.initiate_canary_with_config_using_post_with_http_info(adhoc_execution_request, **kwargs) # noqa: E501
else:
(data) = self.initiate_canary_with_config_using_post_with_http_info(adhoc_execution_request, **kwargs) # noqa: E501
return data
def initiate_canary_with_config_using_post_with_http_info(self, adhoc_execution_request, **kwargs): # noqa: E501
"""Start a canary execution with the supplied canary config # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.initiate_canary_with_config_using_post_with_http_info(adhoc_execution_request, async_req=True)
>>> result = thread.get()
:param async_req bool
:param object adhoc_execution_request: adhocExecutionRequest (required)
:param str application: application
:param str metrics_account_name: metricsAccountName
:param str parent_pipeline_execution_id: parentPipelineExecutionId
:param str storage_account_name: storageAccountName
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['adhoc_execution_request', 'application', 'metrics_account_name', 'parent_pipeline_execution_id', 'storage_account_name'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method initiate_canary_with_config_using_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'adhoc_execution_request' is set
if ('adhoc_execution_request' not in params or
params['adhoc_execution_request'] is None):
raise ValueError("Missing the required parameter `adhoc_execution_request` when calling `initiate_canary_with_config_using_post`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'application' in params:
query_params.append(('application', params['application'])) # noqa: E501
if 'metrics_account_name' in params:
query_params.append(('metricsAccountName', params['metrics_account_name'])) # noqa: E501
if 'parent_pipeline_execution_id' in params:
query_params.append(('parentPipelineExecutionId', params['parent_pipeline_execution_id'])) # noqa: E501
if 'storage_account_name' in params:
query_params.append(('storageAccountName', params['storage_account_name'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'adhoc_execution_request' in params:
body_params = params['adhoc_execution_request']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v2/canaries/canary', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_credentials_using_get(self, **kwargs): # noqa: E501
"""Retrieve a list of configured Kayenta accounts # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_credentials_using_get(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: list[object]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_credentials_using_get_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.list_credentials_using_get_with_http_info(**kwargs) # noqa: E501
return data
def list_credentials_using_get_with_http_info(self, **kwargs): # noqa: E501
"""Retrieve a list of configured Kayenta accounts # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_credentials_using_get_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: list[object]
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_credentials_using_get" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v2/canaries/credentials', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[object]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_judges_using_get(self, **kwargs): # noqa: E501
"""Retrieve a list of all configured canary judges # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_judges_using_get(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: list[object]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_judges_using_get_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.list_judges_using_get_with_http_info(**kwargs) # noqa: E501
return data
def list_judges_using_get_with_http_info(self, **kwargs): # noqa: E501
"""Retrieve a list of all configured canary judges # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_judges_using_get_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: list[object]
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_judges_using_get" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v2/canaries/judges', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[object]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_metrics_service_metadata_using_get(self, **kwargs): # noqa: E501
"""Retrieve a list of descriptors for use in populating the canary config ui # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_metrics_service_metadata_using_get(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str filter: filter
:param str metrics_account_name: metricsAccountName
:return: list[object]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_metrics_service_metadata_using_get_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.list_metrics_service_metadata_using_get_with_http_info(**kwargs) # noqa: E501
return data
def list_metrics_service_metadata_using_get_with_http_info(self, **kwargs): # noqa: E501
"""Retrieve a list of descriptors for use in populating the canary config ui # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_metrics_service_metadata_using_get_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str filter: filter
:param str metrics_account_name: metricsAccountName
:return: list[object]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['filter', 'metrics_account_name'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_metrics_service_metadata_using_get" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
if 'metrics_account_name' in params:
query_params.append(('metricsAccountName', params['metrics_account_name'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v2/canaries/metadata/metricsService', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[object]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
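The docstrings above describe the same calling convention for every endpoint: a plain call performs a synchronous request and returns the deserialized body, while passing `async_req=True` returns a request thread whose `get()` yields the result. A minimal sketch, assuming the default `ApiClient` is already configured to reach a Kayenta/Gate endpoint and using a placeholder execution ID:
```python
from spinnaker_swagger_client.api.v_2_canary_controller_api import V2CanaryControllerApi

# Uses the default ApiClient; endpoint/host configuration is assumed to be done elsewhere.
api = V2CanaryControllerApi()

# Synchronous call: returns the deserialized response body directly.
accounts = api.list_credentials_using_get()

# Asynchronous call: returns a request thread, as described in the docstrings.
thread = api.get_canary_result_using_get1("hypothetical-execution-id", async_req=True)
result = thread.get()
```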
| 42.677215
| 201
| 0.63928
| 4,638
| 40,458
| 5.25787
| 0.043769
| 0.041991
| 0.020668
| 0.026573
| 0.959895
| 0.946691
| 0.933979
| 0.916017
| 0.898384
| 0.891126
| 0
| 0.014013
| 0.275051
| 40,458
| 947
| 202
| 42.722281
| 0.817422
| 0.323175
| 0
| 0.776699
| 1
| 0
| 0.225677
| 0.083067
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036893
| false
| 0
| 0.007767
| 0
| 0.099029
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b556bfc289bf14b9300d0b66e3f97ca9d8d66189
| 112
|
py
|
Python
|
tests/test_gevent_functional_fsm.py
|
benthomasson/gevent-functional-fsm
|
a721dee45cab9b889c9e22f6b6553c51e3590d55
|
[
"Apache-2.0"
] | null | null | null |
tests/test_gevent_functional_fsm.py
|
benthomasson/gevent-functional-fsm
|
a721dee45cab9b889c9e22f6b6553c51e3590d55
|
[
"Apache-2.0"
] | null | null | null |
tests/test_gevent_functional_fsm.py
|
benthomasson/gevent-functional-fsm
|
a721dee45cab9b889c9e22f6b6553c51e3590d55
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
"""Tests for `gevent_functional_fsm` package."""
import pytest
def test_1():
pass
| 11.2
| 48
| 0.678571
| 16
| 112
| 4.5625
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010753
| 0.169643
| 112
| 9
| 49
| 12.444444
| 0.774194
| 0.5625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
b56341664ddab5cdaf66f1c4800db82f9356e9b4
| 107
|
py
|
Python
|
argosd/settings.py
|
danielkoster/argosd
|
e5952df10b00a95ba9edac9e057f080afe1b4d2d
|
[
"MIT"
] | 2
|
2016-09-11T14:27:32.000Z
|
2017-09-30T13:17:27.000Z
|
argosd/settings.py
|
danielkoster/argosd
|
e5952df10b00a95ba9edac9e057f080afe1b4d2d
|
[
"MIT"
] | 22
|
2016-09-03T15:27:45.000Z
|
2021-03-09T10:54:16.000Z
|
argosd/settings.py
|
danielkoster/argosd
|
e5952df10b00a95ba9edac9e057f080afe1b4d2d
|
[
"MIT"
] | null | null | null |
try:
from argosd.settings_local import *
except ImportError:
from argosd.settings_default import *
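The try/except import above implements a common local-overrides-default settings pattern: if `argosd.settings_local` can be imported, it is used in place of `argosd.settings_default`; otherwise the shipped defaults are loaded. A minimal sketch of the two modules; the setting names and values are hypothetical and not taken from the argosd project:
```python
# argosd/settings_default.py -- defaults shipped with the project (hypothetical values)
LOG_LEVEL = "INFO"
DATABASE_PATH = "/var/lib/argosd/argosd.db"

# argosd/settings_local.py -- optional, deployment-specific replacement (hypothetical values)
LOG_LEVEL = "DEBUG"
DATABASE_PATH = "/tmp/argosd-dev.db"
```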
| 21.4
| 41
| 0.766355
| 13
| 107
| 6.153846
| 0.692308
| 0.25
| 0.45
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.17757
| 107
| 4
| 42
| 26.75
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a93057f7a9c4ce3bbf5a0c58b6d64b8a05011a0a
| 8,562
|
py
|
Python
|
Packs/PerimeterX/Integrations/BotDefender/BotDefender_test.py
|
diCagri/content
|
c532c50b213e6dddb8ae6a378d6d09198e08fc9f
|
[
"MIT"
] | 799
|
2016-08-02T06:43:14.000Z
|
2022-03-31T11:10:11.000Z
|
Packs/PerimeterX/Integrations/BotDefender/BotDefender_test.py
|
diCagri/content
|
c532c50b213e6dddb8ae6a378d6d09198e08fc9f
|
[
"MIT"
] | 9,317
|
2016-08-07T19:00:51.000Z
|
2022-03-31T21:56:04.000Z
|
Packs/PerimeterX/Integrations/BotDefender/BotDefender_test.py
|
diCagri/content
|
c532c50b213e6dddb8ae6a378d6d09198e08fc9f
|
[
"MIT"
] | 1,297
|
2016-08-04T13:59:00.000Z
|
2022-03-31T23:43:06.000Z
|
from BotDefender import Client, ip, perimeterx_get_investigate_details
from CommonServerPython import Common
HEADERS = {
'Authorization': 'Bearer token',
'Content-Type': 'application/json'
}
MOCK_BASE_URL = "https://api.perimeterx.com/v1/bot-defender/"
def _get_mock_url():
return f'{MOCK_BASE_URL}/investigate/mock?search=ip:test&tops=user-agent,path,socket_ip_classification'
def _test_base_score(requests_mock, actual_score, expected_score):
ip_addresses = ['5.79.76.181', '5.79.76.182']
mock_response = {
'max_risk_score': actual_score
}
mock_url = _get_mock_url()
requests_mock.get(mock_url, json=mock_response)
client = Client(base_url=mock_url, verify=False, headers=HEADERS)
args = {
'ip': ip_addresses
}
thresholds = {
'good_threshold': 5,
'suspicious_threshold': 50,
'bad_threshold': 90,
'unknown_threshold': 0
}
response = ip(client=client, args=args, thresholds=thresholds, api_key="test")
for single_response in response:
assert single_response.outputs_prefix == 'PerimeterX'
assert isinstance(single_response.indicator, Common.IP)
assert single_response.indicator.ip in ip_addresses
assert single_response.indicator.dbot_score.score == expected_score
def test_ip_high_score(requests_mock):
_test_base_score(requests_mock, actual_score=100, expected_score=3)
def test_ip_suspicious_score(requests_mock):
_test_base_score(requests_mock, actual_score=60, expected_score=2)
def test_ip_good_score(requests_mock):
_test_base_score(requests_mock, actual_score=10, expected_score=1)
def test_ip_unknown_score(requests_mock):
_test_base_score(requests_mock, actual_score=1, expected_score=0)
def _test_perimeterx_get_investigate_details_base(requests_mock, search_type):
mock_response = {
'topUserAgents': [
{'userAgentName': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64 Gecko) Chrome/67.0.3396.79 Safari/537.36',
'count': 84},
{'userAgentName': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64 Gecko) Chrome/65.0.3325.181 Safari/537.36',
'count': 80},
{'userAgentName': 'Mozilla/5.0 (Windows NT 6.1; Win64;x64 Gecko) Chrome/66.0.3359.170 OPR/53.0.2907.99',
'count': 78},
{'userAgentName': 'Mozilla/5.0 (Windows NT 6.1; WOW64 Gecko) Chrome/67.0.3396.87 (Edition Yx)',
'count': 76},
{'userAgentName': 'Mozilla/5.0 (Windows NT 10.0; Win64;x64 Gecko) Chrome/67.0.3396.87 OPR/54.0.2952.51',
'count': 72},
{'userAgentName': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64 Gecko) Chrome/68.0.3440.75 Safari/537.36',
'count': 72},
{'userAgentName': 'Mozilla/5.0 (Windows NT 5.1 Gecko) Chrome/49.0.2623.112 Safari/537.36', 'count': 72},
{'userAgentName': 'Mozilla/5.0 (Windows NT 6.1 Gecko) Chrome/66.0.3359.181 Safari/537.36', 'count': 72},
{'userAgentName': 'Mozilla/5.0 (Windows NT 6.1 Gecko) Chrome/66.0.3359.181 Safari/537.36 Kinza/4.7.2',
'count': 72},
{'userAgentName': 'Mozilla/5.0 (Windows NT 6.3; Win64; x64 Gecko) Chrome/67.0.3396.79 Safari/537.36',
'count': 72}
],
'topURLPaths': [
{'urlPath': '/favicon.ico', 'count': 3315},
{'urlPath': '/favicon.png', 'count': 3253},
{'urlPath': '/', 'count': 3212},
{'urlPath': '/loginok/light.cgi', 'count': 1228},
{'urlPath': '/cgi-bin/way-board.cgi', 'count': 1222},
{'urlPath': '/phpmyadmin/', 'count': 205},
{'urlPath': '-', 'count': 139},
{'urlPath': '/images/icons/favicon.ico', 'count': 82},
{'urlPath': '/test.php', 'count': 48}
],
'topBlockedURLPaths': [
{'blockedURLPath': '/', 'count': 1404},
{'blockedURLPath': '/cgi-bin/way-board.cgi', 'count': 702},
{'blockedURLPath': '/loginok/light.cgi', 'count': 702}
],
'topIncidentTypes': [
{'incidentType': 'Spoof', 'count': 2106},
{'incidentType': 'Bot Behavior', 'count': 702}
],
'catpchaSolves': 200,
'trafficOverTime': [
],
'pageTypeDistributions': [
{'pageType': 'Login', 'count': 1228},
{'pageType': 'Scraping', 'count': 739},
{'pageType': 'Checkout', 'count': 139}
],
'max_risk_score': 100,
'ipClassifications': [
{'class': 'Bad Reputation', 'name': 'Bad Reputation'},
{'class': 'SharedIPs', 'name': 'Shared IPs'},
{'class': 'DataCenter', 'name': 'TAG DCIP'}
]
}
ip_address = ['5.79.76.181']
mock_url = _get_mock_url()
requests_mock.get(mock_url, json=mock_response)
client = Client(base_url=mock_url, verify=False, headers=HEADERS)
args = {
'search_type': search_type,
'search_term': ip_address
}
thresholds = {
'good_threshold': 5,
'suspicious_threshold': 50,
'bad_threshold': 90,
'unknown_threshold': 0
}
response = perimeterx_get_investigate_details(client=client, args=args, thresholds=thresholds, api_key="test_key")
assert response.outputs_prefix == 'PerimeterX'
assert response.outputs == {
'topUserAgents': [
{'userAgentName': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64 Gecko) Chrome/67.0.3396.79 Safari/537.36',
'count': 84},
{'userAgentName': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64 Gecko) Chrome/65.0.3325.181 Safari/537.36',
'count': 80},
{'userAgentName': 'Mozilla/5.0 (Windows NT 6.1; Win64;x64 Gecko) Chrome/66.0.3359.170 OPR/53.0.2907.99',
'count': 78},
{'userAgentName': 'Mozilla/5.0 (Windows NT 6.1; WOW64 Gecko) Chrome/67.0.3396.87 (Edition Yx)',
'count': 76},
{'userAgentName': 'Mozilla/5.0 (Windows NT 10.0; Win64;x64 Gecko) Chrome/67.0.3396.87 OPR/54.0.2952.51',
'count': 72},
{'userAgentName': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64 Gecko) Chrome/68.0.3440.75 Safari/537.36',
'count': 72},
{'userAgentName': 'Mozilla/5.0 (Windows NT 5.1 Gecko) Chrome/49.0.2623.112 Safari/537.36', 'count': 72},
{'userAgentName': 'Mozilla/5.0 (Windows NT 6.1 Gecko) Chrome/66.0.3359.181 Safari/537.36', 'count': 72},
{'userAgentName': 'Mozilla/5.0 (Windows NT 6.1 Gecko) Chrome/66.0.3359.181 Safari/537.36 Kinza/4.7.2',
'count': 72},
{'userAgentName': 'Mozilla/5.0 (Windows NT 6.3; Win64; x64 Gecko) Chrome/67.0.3396.79 Safari/537.36',
'count': 72}
],
'topURLPaths': [
{'urlPath': '/favicon.ico', 'count': 3315},
{'urlPath': '/favicon.png', 'count': 3253},
{'urlPath': '/', 'count': 3212},
{'urlPath': '/loginok/light.cgi', 'count': 1228},
{'urlPath': '/cgi-bin/way-board.cgi', 'count': 1222},
{'urlPath': '/phpmyadmin/', 'count': 205},
{'urlPath': '-', 'count': 139},
{'urlPath': '/images/icons/favicon.ico', 'count': 82},
{'urlPath': '/test.php', 'count': 48}
],
'topBlockedURLPaths': [
{'blockedURLPath': '/', 'count': 1404},
{'blockedURLPath': '/cgi-bin/way-board.cgi', 'count': 702},
{'blockedURLPath': '/loginok/light.cgi', 'count': 702}
],
'topIncidentTypes': [
{'incidentType': 'Spoof', 'count': 2106},
{'incidentType': 'Bot Behavior', 'count': 702}
],
'catpchaSolves': 200,
'trafficOverTime': [
],
'pageTypeDistributions': [
{'pageType': 'Login', 'count': 1228},
{'pageType': 'Scraping', 'count': 739},
{'pageType': 'Checkout', 'count': 139}
],
'max_risk_score': 100,
'ipClassifications': [
{'class': 'Bad Reputation', 'name': 'Bad Reputation'},
{'class': 'SharedIPs', 'name': 'Shared IPs'},
{'class': 'DataCenter', 'name': 'TAG DCIP'}
]
}
def test_perimeterx_get_investigate_details_by_socket_ip(requests_mock):
return _test_perimeterx_get_investigate_details_base(requests_mock, search_type='socket_ip')
def test_perimeterx_get_investigate_details_by_true_ip(requests_mock):
return _test_perimeterx_get_investigate_details_base(requests_mock, search_type='true_ip')
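The score tests above pin down how `max_risk_score` is expected to translate into a DBot score under the configured thresholds (100 → 3, 60 → 2, 10 → 1, 1 → 0). Below is a sketch of the mapping those assertions imply; the real BotDefender integration may implement it differently.
```python
def risk_score_to_dbot_score(max_risk_score: int, thresholds: dict) -> int:
    """Map a PerimeterX max_risk_score to a DBot score (0-3), as the tests above expect."""
    if max_risk_score >= thresholds['bad_threshold']:
        return 3  # Bad
    if max_risk_score >= thresholds['suspicious_threshold']:
        return 2  # Suspicious
    if max_risk_score >= thresholds['good_threshold']:
        return 1  # Good
    return 0  # Unknown


thresholds = {'good_threshold': 5, 'suspicious_threshold': 50,
              'bad_threshold': 90, 'unknown_threshold': 0}
assert [risk_score_to_dbot_score(s, thresholds) for s in (100, 60, 10, 1)] == [3, 2, 1, 0]
```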
| 41.765854
| 118
| 0.588414
| 1,018
| 8,562
| 4.793713
| 0.178782
| 0.081967
| 0.086066
| 0.090164
| 0.828279
| 0.813115
| 0.8125
| 0.78873
| 0.78873
| 0.768238
| 0
| 0.101472
| 0.246087
| 8,562
| 204
| 119
| 41.970588
| 0.654531
| 0
| 0
| 0.728324
| 0
| 0.121387
| 0.429573
| 0.031885
| 0
| 0
| 0
| 0
| 0.034682
| 1
| 0.052023
| false
| 0
| 0.011561
| 0.017341
| 0.080925
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8d164324e0dd3ea59b288fe2a17a82e66b4bbdba
| 14,359
|
py
|
Python
|
models.py
|
RobinSmits/Dutch-NLP-Experiments
|
e7d48cc77650a7600341095ee236ac3760074cfc
|
[
"MIT"
] | null | null | null |
models.py
|
RobinSmits/Dutch-NLP-Experiments
|
e7d48cc77650a7600341095ee236ac3760074cfc
|
[
"MIT"
] | null | null | null |
models.py
|
RobinSmits/Dutch-NLP-Experiments
|
e7d48cc77650a7600341095ee236ac3760074cfc
|
[
"MIT"
] | null | null | null |
import tensorflow as tf
import tensorflow_addons as tfa
from transformers import *
def ModelCheckpoint(model_name: str)->tf.keras.callbacks.ModelCheckpoint:
return tf.keras.callbacks.ModelCheckpoint(model_name,
monitor = 'val_accuracy',
verbose = 1,
save_best_only = True,
save_weights_only = True,
mode = 'max',
period = 1)
### XLM-RoBERTa ######################################################################################################
def create_xlm_roberta_model_v1(use_default_weights: bool,
custom_pretrained_model_checkpoint: str,
strategy: tf.distribute.Strategy,
config: AutoConfig,
lr: float)->tf.keras.Model:
# Set Model init specs
if use_default_weights:
model_type = 'jplu/tf-xlm-roberta-base'
from_pt = False
else:
model_type = custom_pretrained_model_checkpoint
from_pt = True
# Create 'Standard' Classification Model
with strategy.scope():
model = TFXLMRobertaForSequenceClassification.from_pretrained(model_type, config = config, from_pt = from_pt)
optimizer = tf.keras.optimizers.Adam(learning_rate = lr)
loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits = True)
metric = tf.keras.metrics.SparseCategoricalAccuracy('accuracy')
model.compile(optimizer = optimizer, loss = loss, metrics = [metric])
return model
def create_xlm_roberta_model_v2(use_default_weights: bool,
custom_pretrained_model_checkpoint: str,
strategy: tf.distribute.Strategy,
config: AutoConfig,
max_len: int,
lr: float)->tf.keras.Model:
# Set Model init specs
if use_default_weights:
model_type = 'jplu/tf-xlm-roberta-base'
from_pt = False
else:
model_type = custom_pretrained_model_checkpoint
from_pt = True
# Create Custom Model
with strategy.scope():
input_ids = tf.keras.layers.Input(shape = (max_len,), dtype = tf.int32, name = 'input_ids')
input_masks = tf.keras.layers.Input(shape = (max_len,), dtype = tf.int32, name = 'attention_mask')
# Initializers
kernel_initializer = tf.keras.initializers.RandomNormal(mean = 0.0, stddev = 0.05, seed = None)
bias_initializer = tf.keras.initializers.RandomNormal(mean = 0.0, stddev = 0.05, seed = None)
transformers_model = TFRobertaModel.from_pretrained(model_type, config = config, from_pt = from_pt)
last_hidden_states = transformers_model({'input_ids': input_ids, 'attention_mask': input_masks})
x = last_hidden_states[0][:, 0, :]
x = tf.keras.layers.Dropout(0.2)(x)
outputs = tf.keras.layers.Dense(2, kernel_initializer = kernel_initializer, bias_initializer = bias_initializer)(x)
model = tf.keras.Model(inputs = [input_ids, input_masks], outputs = outputs)
optimizer = tf.keras.optimizers.Adam(learning_rate = lr)
loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits = True)
metric = tf.keras.metrics.SparseCategoricalAccuracy('accuracy')
# Compile
model.compile(optimizer = optimizer, loss = loss, metrics = [metric])
return model
def create_xlm_roberta_model_v3(model_type: str,
strategy: tf.distribute.Strategy,
config: AutoConfig,
max_len: int)->tf.keras.Model:
# Create Custom Model
with strategy.scope():
input_ids = tf.keras.layers.Input(shape = (max_len,), dtype = tf.int32, name = 'input_ids')
input_masks = tf.keras.layers.Input(shape = (max_len,), dtype = tf.int32, name = 'attention_mask')
transformers_model = TFRobertaModel.from_pretrained('jplu/tf-xlm-roberta-base', config = config)
output_dict = transformers_model({'input_ids': input_ids, 'attention_mask': input_masks})
last_hidden_state = output_dict.last_hidden_state
outputs = last_hidden_state[:, 0, :]
model = tf.keras.Model(inputs = [input_ids, input_masks], outputs = outputs)
return model
### Multi-Lingual BERT ######################################################################################################
def create_mbert_model_v1(use_default_weights: bool,
custom_pretrained_model_checkpoint: str,
strategy: tf.distribute.Strategy,
config: AutoConfig,
lr: float)->tf.keras.Model:
# Set Model init specs
if use_default_weights:
model_type = 'bert-base-multilingual-cased'
from_pt = False
else:
model_type = custom_pretrained_model_checkpoint
from_pt = True
# Create 'Standard' Classification Model
with strategy.scope():
model = TFBertForSequenceClassification.from_pretrained(model_type, config = config, from_pt = from_pt)
optimizer = tf.keras.optimizers.Adam(learning_rate = lr)
loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits = True)
metric = tf.keras.metrics.SparseCategoricalAccuracy('accuracy')
model.compile(optimizer = optimizer, loss = loss, metrics = [metric])
return model
def create_mbert_model_v2(use_default_weights: bool,
custom_pretrained_model_checkpoint: str,
strategy: tf.distribute.Strategy,
config: AutoConfig,
max_len: int,
lr: float)->tf.keras.Model:
# Set Model init specs
if use_default_weights:
model_type = 'bert-base-multilingual-cased'
from_pt = False
else:
model_type = custom_pretrained_model_checkpoint
from_pt = True
# Create Custom Model
with strategy.scope():
input_ids = tf.keras.layers.Input(shape = (max_len,), dtype = tf.int32, name = 'input_ids')
input_masks = tf.keras.layers.Input(shape = (max_len,), dtype = tf.int32, name = 'attention_mask')
# Initializers
kernel_initializer = tf.keras.initializers.RandomNormal(mean = 0.0, stddev = 0.05, seed = None)
bias_initializer = tf.keras.initializers.RandomNormal(mean = 0.0, stddev = 0.05, seed = None)
transformers_model = TFBertModel.from_pretrained(model_type, config = config, from_pt = from_pt)
last_hidden_states = transformers_model({'input_ids': input_ids, 'attention_mask': input_masks})
x = last_hidden_states[0][:, 0, :]
x = tf.keras.layers.Dropout(0.2)(x)
outputs = tf.keras.layers.Dense(2, kernel_initializer = kernel_initializer, bias_initializer = bias_initializer)(x)
model = tf.keras.Model(inputs = [input_ids, input_masks], outputs = outputs)
optimizer = tf.keras.optimizers.Adam(learning_rate = lr)
loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits = True)
metric = tf.keras.metrics.SparseCategoricalAccuracy('accuracy')
# Compile
model.compile(optimizer = optimizer, loss = loss, metrics = [metric])
return model
def create_mbert_model_v3(model_type: str,
strategy: tf.distribute.Strategy,
config: AutoConfig,
max_len: int)->tf.keras.Model:
# Create Custom Model
with strategy.scope():
input_ids = tf.keras.layers.Input(shape = (max_len,), dtype = tf.int32, name = 'input_ids')
input_masks = tf.keras.layers.Input(shape = (max_len,), dtype = tf.int32, name = 'attention_mask')
transformers_model = TFBertModel.from_pretrained(model_type, config = config)
output_dict = transformers_model({'input_ids': input_ids, 'attention_mask': input_masks})
last_hidden_state = output_dict.last_hidden_state
outputs = last_hidden_state[:, 0, :]
model = tf.keras.Model(inputs = [input_ids, input_masks], outputs = outputs)
return model
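# Editor's note: illustrative sketch only (an assumption, not part of the original file).
# It shows how the v3 feature-extractor model above can be used to pull [CLS] embeddings
# for a list of sentences; the checkpoint name and max_len value are placeholder choices.
# AutoTokenizer is expected to come from the wildcard transformers import at the top.
def _example_mbert_cls_embeddings(texts, max_len = 64):
    strategy = tf.distribute.get_strategy()
    config = AutoConfig.from_pretrained('bert-base-multilingual-cased')
    tokenizer = AutoTokenizer.from_pretrained('bert-base-multilingual-cased')
    model = create_mbert_model_v3('bert-base-multilingual-cased', strategy, config, max_len)
    encodings = tokenizer(texts, padding = 'max_length', truncation = True,
                          max_length = max_len, return_tensors = 'tf')
    # Returns one [CLS] vector (hidden size) per input text.
    return model.predict([encodings['input_ids'], encodings['attention_mask']])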
### MT5 and ByT5 ############################################################################################################
class T5_Accuracy(tf.keras.metrics.Metric):
def __init__(self, label_length, name = 'accuracy', **kwargs):
super(T5_Accuracy, self).__init__(name = name, **kwargs)
self.t5_accuracy = self.add_weight(name = 'accuracy', initializer = 'zeros')
self.steps_counter = self.add_weight(name = 'steps_counter', initializer = 'zeros')
self.label_length = label_length
@tf.function
def update_state(self, y_true, y_pred, sample_weight = None):
# Reshape
y_pred = tf.reshape(y_pred, [-1, y_pred.shape[-1]])
y_true = tf.reshape(y_true, [-1])
# Get Max Indexes
y_pred = tf.math.argmax(y_pred, 1, output_type = 'int32')
# Cast to Int32
y_true = tf.cast(y_true, 'int32')
# Reshape according to max label length...we want to compare the exact predictions made.
y_pred = tf.reshape(y_pred, [-1, self.label_length])
y_true = tf.reshape(y_true, [-1, self.label_length])
# Compare Predicted and Labelled
y_comparison = tf.math.equal(y_pred, y_true)
accuracy = tf.keras.backend.mean(tf.cast(tf.math.reduce_all(y_comparison, 1), tf.keras.backend.floatx()))
self.t5_accuracy.assign_add(accuracy)
self.steps_counter.assign_add(tf.ones(shape = ()))
@tf.function
def result(self):
return self.t5_accuracy / self.steps_counter
@tf.function
def reset_state(self):
for var in self.variables:
var.assign(tf.zeros(shape = var.shape))
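# Editor's note: hypothetical sanity check (an assumption, not part of the original file)
# for the T5_Accuracy metric above: two examples with label_length 2 over a toy vocabulary
# of size 3. The first prediction matches its label exactly, the second does not, so the
# expected running accuracy is 0.5.
def _t5_accuracy_sanity_check():
    acc = T5_Accuracy(label_length = 2)
    y_true = tf.constant([[0, 2], [1, 1]])
    y_pred = tf.one_hot(tf.constant([[0, 2], [1, 0]]), depth = 3)
    acc.update_state(y_true, y_pred)
    return float(acc.result())  # expected: 0.5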
class KerasTFMT5ForConditionalGeneration(TFMT5ForConditionalGeneration):
def __init__(self, *args, log_dir=None, cache_dir= None, **kwargs):
super().__init__(*args, **kwargs)
self.loss_tracker= tf.keras.metrics.Mean(name = 'loss')
@tf.function
def train_step(self, data):
x = data[0]
y = x['labels']
y = tf.reshape(y, [-1, 1])
with tf.GradientTape() as tape:
outputs = self(x, training=True)
loss = outputs[0]
logits = outputs[1]
loss = tf.reduce_mean(loss)
grads = tape.gradient(loss, self.trainable_variables)
self.optimizer.apply_gradients(zip(grads, self.trainable_variables))
self.loss_tracker.update_state(loss)
self.compiled_metrics.update_state(y, logits)
metrics = {m.name: m.result() for m in self.metrics}
return metrics
def test_step(self, data):
x = data[0]
y = x["labels"]
y = tf.reshape(y, [-1, 1])
output = self(x, training = False)
loss = output[0]
loss = tf.reduce_mean(loss)
logits = output[1]
self.loss_tracker.update_state(loss)
self.compiled_metrics.update_state(y, logits)
return {m.name: m.result() for m in self.metrics}
class KerasTFByT5ForConditionalGeneration(TFT5ForConditionalGeneration):
def __init__(self, *args, log_dir=None, cache_dir= None, **kwargs):
super().__init__(*args, **kwargs)
self.loss_tracker= tf.keras.metrics.Mean(name = 'loss')
@tf.function
def train_step(self, data):
x = data[0]
y = x['labels']
y = tf.reshape(y, [-1, 1])
with tf.GradientTape() as tape:
outputs = self(x, training=True)
loss = outputs[0]
logits = outputs[1]
loss = tf.reduce_mean(loss)
grads = tape.gradient(loss, self.trainable_variables)
self.optimizer.apply_gradients(zip(grads, self.trainable_variables))
self.loss_tracker.update_state(loss)
self.compiled_metrics.update_state(y, logits)
metrics = {m.name: m.result() for m in self.metrics}
return metrics
def test_step(self, data):
x = data[0]
y = x["labels"]
y = tf.reshape(y, [-1, 1])
output = self(x, training = False)
loss = output[0]
loss = tf.reduce_mean(loss)
logits = output[1]
self.loss_tracker.update_state(loss)
self.compiled_metrics.update_state(y, logits)
return {m.name: m.result() for m in self.metrics}
def create_mt5_model(model_type: str, strategy: tf.distribute.Strategy, config: AutoConfig, lr: float, max_label_len: int, total_steps: int)->tf.keras.Model:
# Create Model
with strategy.scope():
radam = tfa.optimizers.RectifiedAdam(lr = lr, total_steps = total_steps, warmup_proportion = 0.10, min_lr = lr/3.)
ranger = tfa.optimizers.Lookahead(radam, sync_period = 6, slow_step_size = 0.5)
model = KerasTFMT5ForConditionalGeneration.from_pretrained(model_type, config = config)
model.compile(optimizer = ranger, metrics = [T5_Accuracy(label_length = max_label_len)])
return model
def create_byt5_model(model_type: str, strategy: tf.distribute.Strategy, config: AutoConfig, lr: float, max_label_len: int, total_steps: int)->tf.keras.Model:
# Create Model
with strategy.scope():
radam = tfa.optimizers.RectifiedAdam(lr = lr, total_steps = total_steps, warmup_proportion = 0.10, min_lr = lr/3.)
ranger = tfa.optimizers.Lookahead(radam, sync_period = 6, slow_step_size = 0.5)
model = KerasTFByT5ForConditionalGeneration.from_pretrained(model_type, config = config)
model.compile(optimizer = ranger, metrics = [T5_Accuracy(label_length = max_label_len)])
return model
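# Editor's note: minimal usage sketch (an assumption, not part of the original file) for the
# MT5 builder above; 'google/mt5-small' and the hyper-parameters below are placeholder choices.
def _example_build_mt5_model():
    strategy = tf.distribute.get_strategy()
    config = AutoConfig.from_pretrained('google/mt5-small')
    return create_mt5_model('google/mt5-small', strategy, config,
                            lr = 1e-4, max_label_len = 4, total_steps = 1000)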
| 43.644377
| 158
| 0.598301
| 1,619
| 14,359
| 5.090179
| 0.129092
| 0.039922
| 0.017474
| 0.030093
| 0.824172
| 0.814343
| 0.814343
| 0.804635
| 0.804635
| 0.799175
| 0
| 0.011622
| 0.280939
| 14,359
| 329
| 159
| 43.644377
| 0.786538
| 0.035657
| 0
| 0.778261
| 0
| 0
| 0.032581
| 0.009478
| 0
| 0
| 0
| 0
| 0
| 1
| 0.082609
| false
| 0
| 0.013043
| 0.008696
| 0.169565
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 8d34d8143cb6d3047c2d3f24bc945a928dd64a0e
| 294
| py
| Python
| highcharts/views/__init__.py
| nferrari/django-highcharts
| 71cff45eb81a31f8d2aa87ec8427cf26cd6a8c03
| [ "BSD-3-Clause" ] | null | null | null
| highcharts/views/__init__.py
| nferrari/django-highcharts
| 71cff45eb81a31f8d2aa87ec8427cf26cd6a8c03
| [ "BSD-3-Clause" ] | null | null | null
| highcharts/views/__init__.py
| nferrari/django-highcharts
| 71cff45eb81a31f8d2aa87ec8427cf26cd6a8c03
| [ "BSD-3-Clause" ] | null | null | null |
from highcharts.views.bar import (
HighChartsBarView,
HighChartsStackedView,
HighChartsColumnView,
)
from highcharts.views.line import HighChartsLineView # noqa
from highcharts.views.area import HighChartsAreaView # noqa
from highcharts.views.pie import HighChartsPieView # noqa
| 32.666667
| 60
| 0.806122
| 29
| 294
| 8.172414
| 0.517241
| 0.236287
| 0.320675
| 0.194093
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.139456
| 294
| 8
| 61
| 36.75
| 0.936759
| 0.047619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 7
| 8d63b301413a90d7ee440b1cce504e0f935b58c1
| 221
| py
| Python
| checkers/__init__.py
| kdruken/nci-checker-v2
| e37d24e401c01f1bad348ab64160455020828810
| [ "MIT" ] | null | null | null
| checkers/__init__.py
| kdruken/nci-checker-v2
| e37d24e401c01f1bad348ab64160455020828810
| [ "MIT" ] | null | null | null
| checkers/__init__.py
| kdruken/nci-checker-v2
| e37d24e401c01f1bad348ab64160455020828810
| [ "MIT" ] | null | null | null |
#!/usr/bin/env python
from checkers.check import *
from checkers.cf_check import *
from checkers.gdal_check import *
from checkers.hdf5_check import *
from checkers.acdd_check import *
from checkers.netcdf4_check import *
| 31.571429
| 36
| 0.809955
| 33
| 221
| 5.272727
| 0.393939
| 0.413793
| 0.431034
| 0.66092
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010204
| 0.113122
| 221
| 7
| 36
| 31.571429
| 0.877551
| 0.090498
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 7
| 8d70916a218649661a360911fd068f93928014a5
| 162,782
| py
| Python
| operators/multicluster-operators-subscription/python/pulumi_pulumi_kubernetes_crds_operators_multicluster_operators_subscription/apps/v1/_inputs.py
| pulumi/pulumi-kubernetes-crds
| 372c4c0182f6b899af82d6edaad521aa14f22150
| [ "Apache-2.0" ] | null | null | null
| operators/multicluster-operators-subscription/python/pulumi_pulumi_kubernetes_crds_operators_multicluster_operators_subscription/apps/v1/_inputs.py
| pulumi/pulumi-kubernetes-crds
| 372c4c0182f6b899af82d6edaad521aa14f22150
| [ "Apache-2.0" ] | 2
| 2020-09-18T17:12:23.000Z
| 2020-12-30T19:40:56.000Z
| operators/multicluster-operators-subscription/python/pulumi_pulumi_kubernetes_crds_operators_multicluster_operators_subscription/apps/v1/_inputs.py
| pulumi/pulumi-kubernetes-crds
| 372c4c0182f6b899af82d6edaad521aa14f22150
| [ "Apache-2.0" ] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by crd2pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
__all__ = [
'ChannelSpecArgs',
'ChannelSpecConfigMapRefArgs',
'ChannelSpecGatesArgs',
'ChannelSpecGatesLabelSelectorArgs',
'ChannelSpecGatesLabelSelectorMatchExpressionsArgs',
'ChannelSpecSecretRefArgs',
'DeployableSpecArgs',
'DeployableSpecDependenciesArgs',
'DeployableSpecOverridesArgs',
'DeployableSpecPlacementArgs',
'DeployableSpecPlacementClusterSelectorArgs',
'DeployableSpecPlacementClusterSelectorMatchExpressionsArgs',
'DeployableSpecPlacementClustersArgs',
'DeployableSpecPlacementPlacementRefArgs',
'DeployableStatusArgs',
'HelmReleaseRepoArgs',
'HelmReleaseRepoConfigMapRefArgs',
'HelmReleaseRepoSecretRefArgs',
'HelmReleaseRepoSourceArgs',
'HelmReleaseRepoSourceGithubArgs',
'HelmReleaseRepoSourceHelmRepoArgs',
'HelmReleaseStatusArgs',
'HelmReleaseStatusConditionsArgs',
'HelmReleaseStatusDeployedReleaseArgs',
'PlacementRuleSpecArgs',
'PlacementRuleSpecClusterConditionsArgs',
'PlacementRuleSpecClusterSelectorArgs',
'PlacementRuleSpecClusterSelectorMatchExpressionsArgs',
'PlacementRuleSpecClustersArgs',
'PlacementRuleSpecPoliciesArgs',
'PlacementRuleSpecResourceHintArgs',
'PlacementRuleStatusArgs',
'PlacementRuleStatusDecisionsArgs',
'SubscriptionSpecArgs',
'SubscriptionSpecHooksecretrefArgs',
'SubscriptionSpecOverridesArgs',
'SubscriptionSpecPackageFilterArgs',
'SubscriptionSpecPackageFilterFilterRefArgs',
'SubscriptionSpecPackageFilterLabelSelectorArgs',
'SubscriptionSpecPackageFilterLabelSelectorMatchExpressionsArgs',
'SubscriptionSpecPackageOverridesArgs',
'SubscriptionSpecPlacementArgs',
'SubscriptionSpecPlacementClusterSelectorArgs',
'SubscriptionSpecPlacementClusterSelectorMatchExpressionsArgs',
'SubscriptionSpecPlacementClustersArgs',
'SubscriptionSpecPlacementPlacementRefArgs',
'SubscriptionSpecTimewindowArgs',
'SubscriptionSpecTimewindowHoursArgs',
'SubscriptionStatusArgs',
'SubscriptionStatusAnsiblejobsArgs',
'SubscriptionStatusStatusesArgs',
'SubscriptionStatusStatusesPackagesArgs',
]
@pulumi.input_type
class ChannelSpecArgs:
def __init__(__self__, *,
pathname: pulumi.Input[str],
type: pulumi.Input[str],
config_map_ref: Optional[pulumi.Input['ChannelSpecConfigMapRefArgs']] = None,
gates: Optional[pulumi.Input['ChannelSpecGatesArgs']] = None,
insecure_skip_verify: Optional[pulumi.Input[bool]] = None,
secret_ref: Optional[pulumi.Input['ChannelSpecSecretRefArgs']] = None,
source_namespaces: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
"""
ChannelSpec defines the desired state of Channel
:param pulumi.Input[str] pathname: For a `namespace` channel, pathname is the name of the namespace; For a `helmrepo` or `github` channel, pathname is the remote URL for the channel contents; For a `objectbucket` channel, pathname is the URL and name of the bucket.
:param pulumi.Input[str] type: ChannelType defines types of channel
:param pulumi.Input['ChannelSpecConfigMapRefArgs'] config_map_ref: Reference to a ConfigMap which contains additional settings for accessing the channel. For example, the `insecureSkipVerify` option for accessing HTTPS endpoints can be set in the ConfigMap to indicate an insecure connection.
:param pulumi.Input['ChannelSpecGatesArgs'] gates: Criteria for promoting a Deployable from the sourceNamespaces to Channel.
:param pulumi.Input[bool] insecure_skip_verify: Skip server TLS certificate verification for Git or Helm channel.
:param pulumi.Input['ChannelSpecSecretRefArgs'] secret_ref: For a `github` channel or a `helmrepo` channel on github, this can be used to reference a Secret which contains the credentials for authentication, i.e. `user` and `accessToken`. For a `objectbucket` channel, this can be used to reference a Secret which contains the AWS credentials, i.e. `AccessKeyID` and `SecretAccessKey`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] source_namespaces: A list of namespace names from which Deployables can be promoted.
"""
pulumi.set(__self__, "pathname", pathname)
pulumi.set(__self__, "type", type)
if config_map_ref is not None:
pulumi.set(__self__, "config_map_ref", config_map_ref)
if gates is not None:
pulumi.set(__self__, "gates", gates)
if insecure_skip_verify is not None:
pulumi.set(__self__, "insecure_skip_verify", insecure_skip_verify)
if secret_ref is not None:
pulumi.set(__self__, "secret_ref", secret_ref)
if source_namespaces is not None:
pulumi.set(__self__, "source_namespaces", source_namespaces)
@property
@pulumi.getter
def pathname(self) -> pulumi.Input[str]:
"""
For a `namespace` channel, pathname is the name of the namespace; For a `helmrepo` or `github` channel, pathname is the remote URL for the channel contents; For a `objectbucket` channel, pathname is the URL and name of the bucket.
"""
return pulumi.get(self, "pathname")
@pathname.setter
def pathname(self, value: pulumi.Input[str]):
pulumi.set(self, "pathname", value)
@property
@pulumi.getter
def type(self) -> pulumi.Input[str]:
"""
ChannelType defines types of channel
"""
return pulumi.get(self, "type")
@type.setter
def type(self, value: pulumi.Input[str]):
pulumi.set(self, "type", value)
@property
@pulumi.getter(name="configMapRef")
def config_map_ref(self) -> Optional[pulumi.Input['ChannelSpecConfigMapRefArgs']]:
"""
Reference to a ConfigMap which contains additional settings for accessing the channel. For example, the `insecureSkipVerify` option for accessing HTTPS endpoints can be set in the ConfigMap to indicate an insecure connection.
"""
return pulumi.get(self, "config_map_ref")
@config_map_ref.setter
def config_map_ref(self, value: Optional[pulumi.Input['ChannelSpecConfigMapRefArgs']]):
pulumi.set(self, "config_map_ref", value)
@property
@pulumi.getter
def gates(self) -> Optional[pulumi.Input['ChannelSpecGatesArgs']]:
"""
Criteria for promoting a Deployable from the sourceNamespaces to Channel.
"""
return pulumi.get(self, "gates")
@gates.setter
def gates(self, value: Optional[pulumi.Input['ChannelSpecGatesArgs']]):
pulumi.set(self, "gates", value)
@property
@pulumi.getter(name="insecureSkipVerify")
def insecure_skip_verify(self) -> Optional[pulumi.Input[bool]]:
"""
Skip server TLS certificate verification for Git or Helm channel.
"""
return pulumi.get(self, "insecure_skip_verify")
@insecure_skip_verify.setter
def insecure_skip_verify(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "insecure_skip_verify", value)
@property
@pulumi.getter(name="secretRef")
def secret_ref(self) -> Optional[pulumi.Input['ChannelSpecSecretRefArgs']]:
"""
For a `github` channel or a `helmrepo` channel on github, this can be used to reference a Secret which contains the credentials for authentication, i.e. `user` and `accessToken`. For a `objectbucket` channel, this can be used to reference a Secret which contains the AWS credentials, i.e. `AccessKeyID` and `SecretAccessKey`.
"""
return pulumi.get(self, "secret_ref")
@secret_ref.setter
def secret_ref(self, value: Optional[pulumi.Input['ChannelSpecSecretRefArgs']]):
pulumi.set(self, "secret_ref", value)
@property
@pulumi.getter(name="sourceNamespaces")
def source_namespaces(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of namespace names from which Deployables can be promoted.
"""
return pulumi.get(self, "source_namespaces")
@source_namespaces.setter
def source_namespaces(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "source_namespaces", value)
@pulumi.input_type
class ChannelSpecConfigMapRefArgs:
def __init__(__self__, *,
api_version: Optional[pulumi.Input[str]] = None,
field_path: Optional[pulumi.Input[str]] = None,
kind: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
namespace: Optional[pulumi.Input[str]] = None,
resource_version: Optional[pulumi.Input[str]] = None,
uid: Optional[pulumi.Input[str]] = None):
"""
Reference to a ConfigMap which contains additional settings for accessing the channel. For example, the `insecureSkipVerify` option for accessing HTTPS endpoints can be set in the ConfigMap to indicate an insecure connection.
:param pulumi.Input[str] api_version: API version of the referent.
:param pulumi.Input[str] field_path: If referring to a piece of an object instead of an entire object, this string should contain a valid JSON/Go field access statement, such as desiredState.manifest.containers[2]. For example, if the object reference is to a container within a pod, this would take on a value like: "spec.containers{name}" (where "name" refers to the name of the container that triggered the event) or if no container name is specified "spec.containers[2]" (container with index 2 in this pod). This syntax is chosen only to have some well-defined way of referencing a part of an object. TODO: this design is not final and this field is subject to change in the future.
:param pulumi.Input[str] kind: Kind of the referent. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
:param pulumi.Input[str] name: Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
:param pulumi.Input[str] namespace: Namespace of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/namespaces/
:param pulumi.Input[str] resource_version: Specific resourceVersion to which this reference is made, if any. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#concurrency-control-and-consistency
:param pulumi.Input[str] uid: UID of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#uids
"""
if api_version is not None:
pulumi.set(__self__, "api_version", api_version)
if field_path is not None:
pulumi.set(__self__, "field_path", field_path)
if kind is not None:
pulumi.set(__self__, "kind", kind)
if name is not None:
pulumi.set(__self__, "name", name)
if namespace is not None:
pulumi.set(__self__, "namespace", namespace)
if resource_version is not None:
pulumi.set(__self__, "resource_version", resource_version)
if uid is not None:
pulumi.set(__self__, "uid", uid)
@property
@pulumi.getter(name="apiVersion")
def api_version(self) -> Optional[pulumi.Input[str]]:
"""
API version of the referent.
"""
return pulumi.get(self, "api_version")
@api_version.setter
def api_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "api_version", value)
@property
@pulumi.getter(name="fieldPath")
def field_path(self) -> Optional[pulumi.Input[str]]:
"""
If referring to a piece of an object instead of an entire object, this string should contain a valid JSON/Go field access statement, such as desiredState.manifest.containers[2]. For example, if the object reference is to a container within a pod, this would take on a value like: "spec.containers{name}" (where "name" refers to the name of the container that triggered the event) or if no container name is specified "spec.containers[2]" (container with index 2 in this pod). This syntax is chosen only to have some well-defined way of referencing a part of an object. TODO: this design is not final and this field is subject to change in the future.
"""
return pulumi.get(self, "field_path")
@field_path.setter
def field_path(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "field_path", value)
@property
@pulumi.getter
def kind(self) -> Optional[pulumi.Input[str]]:
"""
Kind of the referent. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
"""
return pulumi.get(self, "kind")
@kind.setter
def kind(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "kind", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def namespace(self) -> Optional[pulumi.Input[str]]:
"""
Namespace of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/namespaces/
"""
return pulumi.get(self, "namespace")
@namespace.setter
def namespace(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "namespace", value)
@property
@pulumi.getter(name="resourceVersion")
def resource_version(self) -> Optional[pulumi.Input[str]]:
"""
Specific resourceVersion to which this reference is made, if any. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#concurrency-control-and-consistency
"""
return pulumi.get(self, "resource_version")
@resource_version.setter
def resource_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_version", value)
@property
@pulumi.getter
def uid(self) -> Optional[pulumi.Input[str]]:
"""
UID of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#uids
"""
return pulumi.get(self, "uid")
@uid.setter
def uid(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "uid", value)
@pulumi.input_type
class ChannelSpecGatesArgs:
def __init__(__self__, *,
annotations: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
label_selector: Optional[pulumi.Input['ChannelSpecGatesLabelSelectorArgs']] = None,
name: Optional[pulumi.Input[str]] = None):
"""
Criteria for promoting a Deployable from the sourceNamespaces to Channel.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] annotations: The annotations which must be present on a Deployable for it to be eligible for promotion.
:param pulumi.Input['ChannelSpecGatesLabelSelectorArgs'] label_selector: A label selector for selecting the Deployables.
"""
if annotations is not None:
pulumi.set(__self__, "annotations", annotations)
if label_selector is not None:
pulumi.set(__self__, "label_selector", label_selector)
if name is not None:
pulumi.set(__self__, "name", name)
@property
@pulumi.getter
def annotations(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
The annotations which must be present on a Deployable for it to be eligible for promotion.
"""
return pulumi.get(self, "annotations")
@annotations.setter
def annotations(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "annotations", value)
@property
@pulumi.getter(name="labelSelector")
def label_selector(self) -> Optional[pulumi.Input['ChannelSpecGatesLabelSelectorArgs']]:
"""
A label selector for selecting the Deployables.
"""
return pulumi.get(self, "label_selector")
@label_selector.setter
def label_selector(self, value: Optional[pulumi.Input['ChannelSpecGatesLabelSelectorArgs']]):
pulumi.set(self, "label_selector", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@pulumi.input_type
class ChannelSpecGatesLabelSelectorArgs:
def __init__(__self__, *,
match_expressions: Optional[pulumi.Input[Sequence[pulumi.Input['ChannelSpecGatesLabelSelectorMatchExpressionsArgs']]]] = None,
match_labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
A label selector for selecting the Deployables.
:param pulumi.Input[Sequence[pulumi.Input['ChannelSpecGatesLabelSelectorMatchExpressionsArgs']]] match_expressions: matchExpressions is a list of label selector requirements. The requirements are ANDed.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] match_labels: matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed.
"""
if match_expressions is not None:
pulumi.set(__self__, "match_expressions", match_expressions)
if match_labels is not None:
pulumi.set(__self__, "match_labels", match_labels)
@property
@pulumi.getter(name="matchExpressions")
def match_expressions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ChannelSpecGatesLabelSelectorMatchExpressionsArgs']]]]:
"""
matchExpressions is a list of label selector requirements. The requirements are ANDed.
"""
return pulumi.get(self, "match_expressions")
@match_expressions.setter
def match_expressions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ChannelSpecGatesLabelSelectorMatchExpressionsArgs']]]]):
pulumi.set(self, "match_expressions", value)
@property
@pulumi.getter(name="matchLabels")
def match_labels(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed.
"""
return pulumi.get(self, "match_labels")
@match_labels.setter
def match_labels(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "match_labels", value)
@pulumi.input_type
class ChannelSpecGatesLabelSelectorMatchExpressionsArgs:
def __init__(__self__, *,
key: pulumi.Input[str],
operator: pulumi.Input[str],
values: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
"""
A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values.
:param pulumi.Input[str] key: key is the label key that the selector applies to.
:param pulumi.Input[str] operator: operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist.
:param pulumi.Input[Sequence[pulumi.Input[str]]] values: values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch.
"""
pulumi.set(__self__, "key", key)
pulumi.set(__self__, "operator", operator)
if values is not None:
pulumi.set(__self__, "values", values)
@property
@pulumi.getter
def key(self) -> pulumi.Input[str]:
"""
key is the label key that the selector applies to.
"""
return pulumi.get(self, "key")
@key.setter
def key(self, value: pulumi.Input[str]):
pulumi.set(self, "key", value)
@property
@pulumi.getter
def operator(self) -> pulumi.Input[str]:
"""
operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist.
"""
return pulumi.get(self, "operator")
@operator.setter
def operator(self, value: pulumi.Input[str]):
pulumi.set(self, "operator", value)
@property
@pulumi.getter
def values(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch.
"""
return pulumi.get(self, "values")
@values.setter
def values(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "values", value)
@pulumi.input_type
class ChannelSpecSecretRefArgs:
def __init__(__self__, *,
api_version: Optional[pulumi.Input[str]] = None,
field_path: Optional[pulumi.Input[str]] = None,
kind: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
namespace: Optional[pulumi.Input[str]] = None,
resource_version: Optional[pulumi.Input[str]] = None,
uid: Optional[pulumi.Input[str]] = None):
"""
For a `github` channel or a `helmrepo` channel on github, this can be used to reference a Secret which contains the credentials for authentication, i.e. `user` and `accessToken`. For a `objectbucket` channel, this can be used to reference a Secret which contains the AWS credentials, i.e. `AccessKeyID` and `SecretAccessKey`.
:param pulumi.Input[str] api_version: API version of the referent.
:param pulumi.Input[str] field_path: If referring to a piece of an object instead of an entire object, this string should contain a valid JSON/Go field access statement, such as desiredState.manifest.containers[2]. For example, if the object reference is to a container within a pod, this would take on a value like: "spec.containers{name}" (where "name" refers to the name of the container that triggered the event) or if no container name is specified "spec.containers[2]" (container with index 2 in this pod). This syntax is chosen only to have some well-defined way of referencing a part of an object. TODO: this design is not final and this field is subject to change in the future.
:param pulumi.Input[str] kind: Kind of the referent. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
:param pulumi.Input[str] name: Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
:param pulumi.Input[str] namespace: Namespace of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/namespaces/
:param pulumi.Input[str] resource_version: Specific resourceVersion to which this reference is made, if any. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#concurrency-control-and-consistency
:param pulumi.Input[str] uid: UID of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#uids
"""
if api_version is not None:
pulumi.set(__self__, "api_version", api_version)
if field_path is not None:
pulumi.set(__self__, "field_path", field_path)
if kind is not None:
pulumi.set(__self__, "kind", kind)
if name is not None:
pulumi.set(__self__, "name", name)
if namespace is not None:
pulumi.set(__self__, "namespace", namespace)
if resource_version is not None:
pulumi.set(__self__, "resource_version", resource_version)
if uid is not None:
pulumi.set(__self__, "uid", uid)
@property
@pulumi.getter(name="apiVersion")
def api_version(self) -> Optional[pulumi.Input[str]]:
"""
API version of the referent.
"""
return pulumi.get(self, "api_version")
@api_version.setter
def api_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "api_version", value)
@property
@pulumi.getter(name="fieldPath")
def field_path(self) -> Optional[pulumi.Input[str]]:
"""
If referring to a piece of an object instead of an entire object, this string should contain a valid JSON/Go field access statement, such as desiredState.manifest.containers[2]. For example, if the object reference is to a container within a pod, this would take on a value like: "spec.containers{name}" (where "name" refers to the name of the container that triggered the event) or if no container name is specified "spec.containers[2]" (container with index 2 in this pod). This syntax is chosen only to have some well-defined way of referencing a part of an object. TODO: this design is not final and this field is subject to change in the future.
"""
return pulumi.get(self, "field_path")
@field_path.setter
def field_path(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "field_path", value)
@property
@pulumi.getter
def kind(self) -> Optional[pulumi.Input[str]]:
"""
Kind of the referent. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
"""
return pulumi.get(self, "kind")
@kind.setter
def kind(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "kind", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def namespace(self) -> Optional[pulumi.Input[str]]:
"""
Namespace of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/namespaces/
"""
return pulumi.get(self, "namespace")
@namespace.setter
def namespace(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "namespace", value)
@property
@pulumi.getter(name="resourceVersion")
def resource_version(self) -> Optional[pulumi.Input[str]]:
"""
Specific resourceVersion to which this reference is made, if any. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#concurrency-control-and-consistency
"""
return pulumi.get(self, "resource_version")
@resource_version.setter
def resource_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_version", value)
@property
@pulumi.getter
def uid(self) -> Optional[pulumi.Input[str]]:
"""
UID of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#uids
"""
return pulumi.get(self, "uid")
@uid.setter
def uid(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "uid", value)
@pulumi.input_type
class DeployableSpecArgs:
def __init__(__self__, *,
template: pulumi.Input[Mapping[str, Any]],
channels: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
dependencies: Optional[pulumi.Input[Sequence[pulumi.Input['DeployableSpecDependenciesArgs']]]] = None,
overrides: Optional[pulumi.Input[Sequence[pulumi.Input['DeployableSpecOverridesArgs']]]] = None,
placement: Optional[pulumi.Input['DeployableSpecPlacementArgs']] = None):
"""
DeployableSpec defines the desired state of Deployable
:param pulumi.Input['DeployableSpecPlacementArgs'] placement: Placement field to be referenced in specs, align with Fedv2, add placementref
"""
pulumi.set(__self__, "template", template)
if channels is not None:
pulumi.set(__self__, "channels", channels)
if dependencies is not None:
pulumi.set(__self__, "dependencies", dependencies)
if overrides is not None:
pulumi.set(__self__, "overrides", overrides)
if placement is not None:
pulumi.set(__self__, "placement", placement)
@property
@pulumi.getter
def template(self) -> pulumi.Input[Mapping[str, Any]]:
return pulumi.get(self, "template")
@template.setter
def template(self, value: pulumi.Input[Mapping[str, Any]]):
pulumi.set(self, "template", value)
@property
@pulumi.getter
def channels(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
return pulumi.get(self, "channels")
@channels.setter
def channels(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "channels", value)
@property
@pulumi.getter
def dependencies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['DeployableSpecDependenciesArgs']]]]:
return pulumi.get(self, "dependencies")
@dependencies.setter
def dependencies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['DeployableSpecDependenciesArgs']]]]):
pulumi.set(self, "dependencies", value)
@property
@pulumi.getter
def overrides(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['DeployableSpecOverridesArgs']]]]:
return pulumi.get(self, "overrides")
@overrides.setter
def overrides(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['DeployableSpecOverridesArgs']]]]):
pulumi.set(self, "overrides", value)
@property
@pulumi.getter
def placement(self) -> Optional[pulumi.Input['DeployableSpecPlacementArgs']]:
"""
Placement field to be referenced in specs, align with Fedv2, add placementref
"""
return pulumi.get(self, "placement")
@placement.setter
def placement(self, value: Optional[pulumi.Input['DeployableSpecPlacementArgs']]):
pulumi.set(self, "placement", value)
@pulumi.input_type
class DeployableSpecDependenciesArgs:
def __init__(__self__, *,
annotations: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
api_version: Optional[pulumi.Input[str]] = None,
field_path: Optional[pulumi.Input[str]] = None,
kind: Optional[pulumi.Input[str]] = None,
labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
name: Optional[pulumi.Input[str]] = None,
namespace: Optional[pulumi.Input[str]] = None,
resource_version: Optional[pulumi.Input[str]] = None,
uid: Optional[pulumi.Input[str]] = None):
"""
Dependency of Deployable Properties field is the flexibility for different Kind
:param pulumi.Input[str] api_version: API version of the referent.
:param pulumi.Input[str] field_path: If referring to a piece of an object instead of an entire object, this string should contain a valid JSON/Go field access statement, such as desiredState.manifest.containers[2]. For example, if the object reference is to a container within a pod, this would take on a value like: "spec.containers{name}" (where "name" refers to the name of the container that triggered the event) or if no container name is specified "spec.containers[2]" (container with index 2 in this pod). This syntax is chosen only to have some well-defined way of referencing a part of an object. TODO: this design is not final and this field is subject to change in the future.
:param pulumi.Input[str] kind: Kind of the referent. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
:param pulumi.Input[str] name: Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
:param pulumi.Input[str] namespace: Namespace of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/namespaces/
:param pulumi.Input[str] resource_version: Specific resourceVersion to which this reference is made, if any. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#concurrency-control-and-consistency
:param pulumi.Input[str] uid: UID of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#uids
"""
if annotations is not None:
pulumi.set(__self__, "annotations", annotations)
if api_version is not None:
pulumi.set(__self__, "api_version", api_version)
if field_path is not None:
pulumi.set(__self__, "field_path", field_path)
if kind is not None:
pulumi.set(__self__, "kind", kind)
if labels is not None:
pulumi.set(__self__, "labels", labels)
if name is not None:
pulumi.set(__self__, "name", name)
if namespace is not None:
pulumi.set(__self__, "namespace", namespace)
if resource_version is not None:
pulumi.set(__self__, "resource_version", resource_version)
if uid is not None:
pulumi.set(__self__, "uid", uid)
@property
@pulumi.getter
def annotations(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
return pulumi.get(self, "annotations")
@annotations.setter
def annotations(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "annotations", value)
@property
@pulumi.getter(name="apiVersion")
def api_version(self) -> Optional[pulumi.Input[str]]:
"""
API version of the referent.
"""
return pulumi.get(self, "api_version")
@api_version.setter
def api_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "api_version", value)
@property
@pulumi.getter(name="fieldPath")
def field_path(self) -> Optional[pulumi.Input[str]]:
"""
If referring to a piece of an object instead of an entire object, this string should contain a valid JSON/Go field access statement, such as desiredState.manifest.containers[2]. For example, if the object reference is to a container within a pod, this would take on a value like: "spec.containers{name}" (where "name" refers to the name of the container that triggered the event) or if no container name is specified "spec.containers[2]" (container with index 2 in this pod). This syntax is chosen only to have some well-defined way of referencing a part of an object. TODO: this design is not final and this field is subject to change in the future.
"""
return pulumi.get(self, "field_path")
@field_path.setter
def field_path(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "field_path", value)
@property
@pulumi.getter
def kind(self) -> Optional[pulumi.Input[str]]:
"""
Kind of the referent. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
"""
return pulumi.get(self, "kind")
@kind.setter
def kind(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "kind", value)
@property
@pulumi.getter
def labels(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
return pulumi.get(self, "labels")
@labels.setter
def labels(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "labels", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def namespace(self) -> Optional[pulumi.Input[str]]:
"""
Namespace of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/namespaces/
"""
return pulumi.get(self, "namespace")
@namespace.setter
def namespace(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "namespace", value)
@property
@pulumi.getter(name="resourceVersion")
def resource_version(self) -> Optional[pulumi.Input[str]]:
"""
Specific resourceVersion to which this reference is made, if any. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#concurrency-control-and-consistency
"""
return pulumi.get(self, "resource_version")
@resource_version.setter
def resource_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_version", value)
@property
@pulumi.getter
def uid(self) -> Optional[pulumi.Input[str]]:
"""
UID of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#uids
"""
return pulumi.get(self, "uid")
@uid.setter
def uid(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "uid", value)
@pulumi.input_type
class DeployableSpecOverridesArgs:
def __init__(__self__, *,
cluster_name: pulumi.Input[str],
cluster_overrides: pulumi.Input[Sequence[pulumi.Input[Mapping[str, Any]]]]):
"""
Overrides field in deployable
"""
pulumi.set(__self__, "cluster_name", cluster_name)
pulumi.set(__self__, "cluster_overrides", cluster_overrides)
@property
@pulumi.getter(name="clusterName")
def cluster_name(self) -> pulumi.Input[str]:
return pulumi.get(self, "cluster_name")
@cluster_name.setter
def cluster_name(self, value: pulumi.Input[str]):
pulumi.set(self, "cluster_name", value)
@property
@pulumi.getter(name="clusterOverrides")
def cluster_overrides(self) -> pulumi.Input[Sequence[pulumi.Input[Mapping[str, Any]]]]:
return pulumi.get(self, "cluster_overrides")
@cluster_overrides.setter
def cluster_overrides(self, value: pulumi.Input[Sequence[pulumi.Input[Mapping[str, Any]]]]):
pulumi.set(self, "cluster_overrides", value)
@pulumi.input_type
class DeployableSpecPlacementArgs:
def __init__(__self__, *,
cluster_selector: Optional[pulumi.Input['DeployableSpecPlacementClusterSelectorArgs']] = None,
clusters: Optional[pulumi.Input[Sequence[pulumi.Input['DeployableSpecPlacementClustersArgs']]]] = None,
local: Optional[pulumi.Input[bool]] = None,
placement_ref: Optional[pulumi.Input['DeployableSpecPlacementPlacementRefArgs']] = None):
"""
Placement field to be referenced in specs, align with Fedv2, add placementref
:param pulumi.Input['DeployableSpecPlacementClusterSelectorArgs'] cluster_selector: A label selector is a label query over a set of resources. The result of matchLabels and matchExpressions are ANDed. An empty label selector matches all objects. A null label selector matches no objects.
:param pulumi.Input['DeployableSpecPlacementPlacementRefArgs'] placement_ref: ObjectReference contains enough information to let you inspect or modify the referred object.
"""
if cluster_selector is not None:
pulumi.set(__self__, "cluster_selector", cluster_selector)
if clusters is not None:
pulumi.set(__self__, "clusters", clusters)
if local is not None:
pulumi.set(__self__, "local", local)
if placement_ref is not None:
pulumi.set(__self__, "placement_ref", placement_ref)
@property
@pulumi.getter(name="clusterSelector")
def cluster_selector(self) -> Optional[pulumi.Input['DeployableSpecPlacementClusterSelectorArgs']]:
"""
A label selector is a label query over a set of resources. The result of matchLabels and matchExpressions are ANDed. An empty label selector matches all objects. A null label selector matches no objects.
"""
return pulumi.get(self, "cluster_selector")
@cluster_selector.setter
def cluster_selector(self, value: Optional[pulumi.Input['DeployableSpecPlacementClusterSelectorArgs']]):
pulumi.set(self, "cluster_selector", value)
@property
@pulumi.getter
def clusters(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['DeployableSpecPlacementClustersArgs']]]]:
return pulumi.get(self, "clusters")
@clusters.setter
def clusters(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['DeployableSpecPlacementClustersArgs']]]]):
pulumi.set(self, "clusters", value)
@property
@pulumi.getter
def local(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "local")
@local.setter
def local(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "local", value)
@property
@pulumi.getter(name="placementRef")
def placement_ref(self) -> Optional[pulumi.Input['DeployableSpecPlacementPlacementRefArgs']]:
"""
ObjectReference contains enough information to let you inspect or modify the referred object.
"""
return pulumi.get(self, "placement_ref")
@placement_ref.setter
def placement_ref(self, value: Optional[pulumi.Input['DeployableSpecPlacementPlacementRefArgs']]):
pulumi.set(self, "placement_ref", value)
@pulumi.input_type
class DeployableSpecPlacementClusterSelectorArgs:
def __init__(__self__, *,
match_expressions: Optional[pulumi.Input[Sequence[pulumi.Input['DeployableSpecPlacementClusterSelectorMatchExpressionsArgs']]]] = None,
match_labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
A label selector is a label query over a set of resources. The result of matchLabels and matchExpressions are ANDed. An empty label selector matches all objects. A null label selector matches no objects.
:param pulumi.Input[Sequence[pulumi.Input['DeployableSpecPlacementClusterSelectorMatchExpressionsArgs']]] match_expressions: matchExpressions is a list of label selector requirements. The requirements are ANDed.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] match_labels: matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed.
"""
if match_expressions is not None:
pulumi.set(__self__, "match_expressions", match_expressions)
if match_labels is not None:
pulumi.set(__self__, "match_labels", match_labels)
@property
@pulumi.getter(name="matchExpressions")
def match_expressions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['DeployableSpecPlacementClusterSelectorMatchExpressionsArgs']]]]:
"""
matchExpressions is a list of label selector requirements. The requirements are ANDed.
"""
return pulumi.get(self, "match_expressions")
@match_expressions.setter
def match_expressions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['DeployableSpecPlacementClusterSelectorMatchExpressionsArgs']]]]):
pulumi.set(self, "match_expressions", value)
@property
@pulumi.getter(name="matchLabels")
def match_labels(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed.
"""
return pulumi.get(self, "match_labels")
@match_labels.setter
def match_labels(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "match_labels", value)
@pulumi.input_type
class DeployableSpecPlacementClusterSelectorMatchExpressionsArgs:
def __init__(__self__, *,
key: pulumi.Input[str],
operator: pulumi.Input[str],
values: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
"""
A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values.
:param pulumi.Input[str] key: key is the label key that the selector applies to.
:param pulumi.Input[str] operator: operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist.
:param pulumi.Input[Sequence[pulumi.Input[str]]] values: values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch.
"""
pulumi.set(__self__, "key", key)
pulumi.set(__self__, "operator", operator)
if values is not None:
pulumi.set(__self__, "values", values)
@property
@pulumi.getter
def key(self) -> pulumi.Input[str]:
"""
key is the label key that the selector applies to.
"""
return pulumi.get(self, "key")
@key.setter
def key(self, value: pulumi.Input[str]):
pulumi.set(self, "key", value)
@property
@pulumi.getter
def operator(self) -> pulumi.Input[str]:
"""
operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist.
"""
return pulumi.get(self, "operator")
@operator.setter
def operator(self, value: pulumi.Input[str]):
pulumi.set(self, "operator", value)
@property
@pulumi.getter
def values(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch.
"""
return pulumi.get(self, "values")
@values.setter
def values(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "values", value)
@pulumi.input_type
class DeployableSpecPlacementClustersArgs:
def __init__(__self__, *,
name: pulumi.Input[str]):
"""
GenericClusterReference - in alignment with kubefed
"""
pulumi.set(__self__, "name", name)
@property
@pulumi.getter
def name(self) -> pulumi.Input[str]:
return pulumi.get(self, "name")
@name.setter
def name(self, value: pulumi.Input[str]):
pulumi.set(self, "name", value)
@pulumi.input_type
class DeployableSpecPlacementPlacementRefArgs:
def __init__(__self__, *,
api_version: Optional[pulumi.Input[str]] = None,
field_path: Optional[pulumi.Input[str]] = None,
kind: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
namespace: Optional[pulumi.Input[str]] = None,
resource_version: Optional[pulumi.Input[str]] = None,
uid: Optional[pulumi.Input[str]] = None):
"""
ObjectReference contains enough information to let you inspect or modify the referred object.
:param pulumi.Input[str] api_version: API version of the referent.
:param pulumi.Input[str] field_path: If referring to a piece of an object instead of an entire object, this string should contain a valid JSON/Go field access statement, such as desiredState.manifest.containers[2]. For example, if the object reference is to a container within a pod, this would take on a value like: "spec.containers{name}" (where "name" refers to the name of the container that triggered the event) or if no container name is specified "spec.containers[2]" (container with index 2 in this pod). This syntax is chosen only to have some well-defined way of referencing a part of an object. TODO: this design is not final and this field is subject to change in the future.
:param pulumi.Input[str] kind: Kind of the referent. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
:param pulumi.Input[str] name: Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
:param pulumi.Input[str] namespace: Namespace of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/namespaces/
:param pulumi.Input[str] resource_version: Specific resourceVersion to which this reference is made, if any. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#concurrency-control-and-consistency
:param pulumi.Input[str] uid: UID of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#uids
"""
if api_version is not None:
pulumi.set(__self__, "api_version", api_version)
if field_path is not None:
pulumi.set(__self__, "field_path", field_path)
if kind is not None:
pulumi.set(__self__, "kind", kind)
if name is not None:
pulumi.set(__self__, "name", name)
if namespace is not None:
pulumi.set(__self__, "namespace", namespace)
if resource_version is not None:
pulumi.set(__self__, "resource_version", resource_version)
if uid is not None:
pulumi.set(__self__, "uid", uid)
@property
@pulumi.getter(name="apiVersion")
def api_version(self) -> Optional[pulumi.Input[str]]:
"""
API version of the referent.
"""
return pulumi.get(self, "api_version")
@api_version.setter
def api_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "api_version", value)
@property
@pulumi.getter(name="fieldPath")
def field_path(self) -> Optional[pulumi.Input[str]]:
"""
If referring to a piece of an object instead of an entire object, this string should contain a valid JSON/Go field access statement, such as desiredState.manifest.containers[2]. For example, if the object reference is to a container within a pod, this would take on a value like: "spec.containers{name}" (where "name" refers to the name of the container that triggered the event) or if no container name is specified "spec.containers[2]" (container with index 2 in this pod). This syntax is chosen only to have some well-defined way of referencing a part of an object. TODO: this design is not final and this field is subject to change in the future.
"""
return pulumi.get(self, "field_path")
@field_path.setter
def field_path(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "field_path", value)
@property
@pulumi.getter
def kind(self) -> Optional[pulumi.Input[str]]:
"""
Kind of the referent. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
"""
return pulumi.get(self, "kind")
@kind.setter
def kind(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "kind", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def namespace(self) -> Optional[pulumi.Input[str]]:
"""
Namespace of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/namespaces/
"""
return pulumi.get(self, "namespace")
@namespace.setter
def namespace(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "namespace", value)
@property
@pulumi.getter(name="resourceVersion")
def resource_version(self) -> Optional[pulumi.Input[str]]:
"""
Specific resourceVersion to which this reference is made, if any. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#concurrency-control-and-consistency
"""
return pulumi.get(self, "resource_version")
@resource_version.setter
def resource_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_version", value)
@property
@pulumi.getter
def uid(self) -> Optional[pulumi.Input[str]]:
"""
UID of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#uids
"""
return pulumi.get(self, "uid")
@uid.setter
def uid(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "uid", value)
@pulumi.input_type
class DeployableStatusArgs:
def __init__(__self__, *,
target_clusters: Optional[Any] = None):
"""
DeployableStatus defines the observed state of Deployable
"""
if target_clusters is not None:
pulumi.set(__self__, "target_clusters", target_clusters)
@property
@pulumi.getter(name="targetClusters")
def target_clusters(self) -> Optional[Any]:
return pulumi.get(self, "target_clusters")
@target_clusters.setter
def target_clusters(self, value: Optional[Any]):
pulumi.set(self, "target_clusters", value)
@pulumi.input_type
class HelmReleaseRepoArgs:
def __init__(__self__, *,
chart_name: Optional[pulumi.Input[str]] = None,
config_map_ref: Optional[pulumi.Input['HelmReleaseRepoConfigMapRefArgs']] = None,
insecure_skip_verify: Optional[pulumi.Input[bool]] = None,
secret_ref: Optional[pulumi.Input['HelmReleaseRepoSecretRefArgs']] = None,
source: Optional[pulumi.Input['HelmReleaseRepoSourceArgs']] = None,
version: Optional[pulumi.Input[str]] = None):
"""
HelmReleaseRepo defines the repository of HelmRelease
:param pulumi.Input[str] chart_name: ChartName is the name of the chart within the repo
:param pulumi.Input['HelmReleaseRepoConfigMapRefArgs'] config_map_ref: Configuration parameters to access the helm-repo defined in the CatalogSource
:param pulumi.Input[bool] insecure_skip_verify: Used to skip repo server's TLS certificate verification
:param pulumi.Input['HelmReleaseRepoSecretRefArgs'] secret_ref: Secret to use to access the helm-repo defined in the CatalogSource.
:param pulumi.Input['HelmReleaseRepoSourceArgs'] source: Source holds the URL toward the helm-chart
:param pulumi.Input[str] version: Version is the chart version
"""
if chart_name is not None:
pulumi.set(__self__, "chart_name", chart_name)
if config_map_ref is not None:
pulumi.set(__self__, "config_map_ref", config_map_ref)
if insecure_skip_verify is not None:
pulumi.set(__self__, "insecure_skip_verify", insecure_skip_verify)
if secret_ref is not None:
pulumi.set(__self__, "secret_ref", secret_ref)
if source is not None:
pulumi.set(__self__, "source", source)
if version is not None:
pulumi.set(__self__, "version", version)
@property
@pulumi.getter(name="chartName")
def chart_name(self) -> Optional[pulumi.Input[str]]:
"""
ChartName is the name of the chart within the repo
"""
return pulumi.get(self, "chart_name")
@chart_name.setter
def chart_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "chart_name", value)
@property
@pulumi.getter(name="configMapRef")
def config_map_ref(self) -> Optional[pulumi.Input['HelmReleaseRepoConfigMapRefArgs']]:
"""
Configuration parameters to access the helm-repo defined in the CatalogSource
"""
return pulumi.get(self, "config_map_ref")
@config_map_ref.setter
def config_map_ref(self, value: Optional[pulumi.Input['HelmReleaseRepoConfigMapRefArgs']]):
pulumi.set(self, "config_map_ref", value)
@property
@pulumi.getter(name="insecureSkipVerify")
def insecure_skip_verify(self) -> Optional[pulumi.Input[bool]]:
"""
Used to skip repo server's TLS certificate verification
"""
return pulumi.get(self, "insecure_skip_verify")
@insecure_skip_verify.setter
def insecure_skip_verify(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "insecure_skip_verify", value)
@property
@pulumi.getter(name="secretRef")
def secret_ref(self) -> Optional[pulumi.Input['HelmReleaseRepoSecretRefArgs']]:
"""
Secret to use to access the helm-repo defined in the CatalogSource.
"""
return pulumi.get(self, "secret_ref")
@secret_ref.setter
def secret_ref(self, value: Optional[pulumi.Input['HelmReleaseRepoSecretRefArgs']]):
pulumi.set(self, "secret_ref", value)
@property
@pulumi.getter
def source(self) -> Optional[pulumi.Input['HelmReleaseRepoSourceArgs']]:
"""
Source holds the URL toward the helm-chart
"""
return pulumi.get(self, "source")
@source.setter
def source(self, value: Optional[pulumi.Input['HelmReleaseRepoSourceArgs']]):
pulumi.set(self, "source", value)
@property
@pulumi.getter
def version(self) -> Optional[pulumi.Input[str]]:
"""
Version is the chart version
"""
return pulumi.get(self, "version")
@version.setter
def version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "version", value)
@pulumi.input_type
class HelmReleaseRepoConfigMapRefArgs:
def __init__(__self__, *,
api_version: Optional[pulumi.Input[str]] = None,
field_path: Optional[pulumi.Input[str]] = None,
kind: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
namespace: Optional[pulumi.Input[str]] = None,
resource_version: Optional[pulumi.Input[str]] = None,
uid: Optional[pulumi.Input[str]] = None):
"""
Configuration parameters to access the helm-repo defined in the CatalogSource
:param pulumi.Input[str] api_version: API version of the referent.
:param pulumi.Input[str] field_path: If referring to a piece of an object instead of an entire object, this string should contain a valid JSON/Go field access statement, such as desiredState.manifest.containers[2]. For example, if the object reference is to a container within a pod, this would take on a value like: "spec.containers{name}" (where "name" refers to the name of the container that triggered the event) or if no container name is specified "spec.containers[2]" (container with index 2 in this pod). This syntax is chosen only to have some well-defined way of referencing a part of an object. TODO: this design is not final and this field is subject to change in the future.
:param pulumi.Input[str] kind: Kind of the referent. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
:param pulumi.Input[str] name: Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
:param pulumi.Input[str] namespace: Namespace of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/namespaces/
:param pulumi.Input[str] resource_version: Specific resourceVersion to which this reference is made, if any. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#concurrency-control-and-consistency
:param pulumi.Input[str] uid: UID of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#uids
"""
if api_version is not None:
pulumi.set(__self__, "api_version", api_version)
if field_path is not None:
pulumi.set(__self__, "field_path", field_path)
if kind is not None:
pulumi.set(__self__, "kind", kind)
if name is not None:
pulumi.set(__self__, "name", name)
if namespace is not None:
pulumi.set(__self__, "namespace", namespace)
if resource_version is not None:
pulumi.set(__self__, "resource_version", resource_version)
if uid is not None:
pulumi.set(__self__, "uid", uid)
@property
@pulumi.getter(name="apiVersion")
def api_version(self) -> Optional[pulumi.Input[str]]:
"""
API version of the referent.
"""
return pulumi.get(self, "api_version")
@api_version.setter
def api_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "api_version", value)
@property
@pulumi.getter(name="fieldPath")
def field_path(self) -> Optional[pulumi.Input[str]]:
"""
If referring to a piece of an object instead of an entire object, this string should contain a valid JSON/Go field access statement, such as desiredState.manifest.containers[2]. For example, if the object reference is to a container within a pod, this would take on a value like: "spec.containers{name}" (where "name" refers to the name of the container that triggered the event) or if no container name is specified "spec.containers[2]" (container with index 2 in this pod). This syntax is chosen only to have some well-defined way of referencing a part of an object. TODO: this design is not final and this field is subject to change in the future.
"""
return pulumi.get(self, "field_path")
@field_path.setter
def field_path(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "field_path", value)
@property
@pulumi.getter
def kind(self) -> Optional[pulumi.Input[str]]:
"""
Kind of the referent. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
"""
return pulumi.get(self, "kind")
@kind.setter
def kind(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "kind", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def namespace(self) -> Optional[pulumi.Input[str]]:
"""
Namespace of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/namespaces/
"""
return pulumi.get(self, "namespace")
@namespace.setter
def namespace(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "namespace", value)
@property
@pulumi.getter(name="resourceVersion")
def resource_version(self) -> Optional[pulumi.Input[str]]:
"""
Specific resourceVersion to which this reference is made, if any. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#concurrency-control-and-consistency
"""
return pulumi.get(self, "resource_version")
@resource_version.setter
def resource_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_version", value)
@property
@pulumi.getter
def uid(self) -> Optional[pulumi.Input[str]]:
"""
UID of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#uids
"""
return pulumi.get(self, "uid")
@uid.setter
def uid(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "uid", value)
@pulumi.input_type
class HelmReleaseRepoSecretRefArgs:
def __init__(__self__, *,
api_version: Optional[pulumi.Input[str]] = None,
field_path: Optional[pulumi.Input[str]] = None,
kind: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
namespace: Optional[pulumi.Input[str]] = None,
resource_version: Optional[pulumi.Input[str]] = None,
uid: Optional[pulumi.Input[str]] = None):
"""
Secret to use to access the helm-repo defined in the CatalogSource.
:param pulumi.Input[str] api_version: API version of the referent.
:param pulumi.Input[str] field_path: If referring to a piece of an object instead of an entire object, this string should contain a valid JSON/Go field access statement, such as desiredState.manifest.containers[2]. For example, if the object reference is to a container within a pod, this would take on a value like: "spec.containers{name}" (where "name" refers to the name of the container that triggered the event) or if no container name is specified "spec.containers[2]" (container with index 2 in this pod). This syntax is chosen only to have some well-defined way of referencing a part of an object. TODO: this design is not final and this field is subject to change in the future.
:param pulumi.Input[str] kind: Kind of the referent. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
:param pulumi.Input[str] name: Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
:param pulumi.Input[str] namespace: Namespace of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/namespaces/
:param pulumi.Input[str] resource_version: Specific resourceVersion to which this reference is made, if any. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#concurrency-control-and-consistency
:param pulumi.Input[str] uid: UID of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#uids
"""
if api_version is not None:
pulumi.set(__self__, "api_version", api_version)
if field_path is not None:
pulumi.set(__self__, "field_path", field_path)
if kind is not None:
pulumi.set(__self__, "kind", kind)
if name is not None:
pulumi.set(__self__, "name", name)
if namespace is not None:
pulumi.set(__self__, "namespace", namespace)
if resource_version is not None:
pulumi.set(__self__, "resource_version", resource_version)
if uid is not None:
pulumi.set(__self__, "uid", uid)
@property
@pulumi.getter(name="apiVersion")
def api_version(self) -> Optional[pulumi.Input[str]]:
"""
API version of the referent.
"""
return pulumi.get(self, "api_version")
@api_version.setter
def api_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "api_version", value)
@property
@pulumi.getter(name="fieldPath")
def field_path(self) -> Optional[pulumi.Input[str]]:
"""
If referring to a piece of an object instead of an entire object, this string should contain a valid JSON/Go field access statement, such as desiredState.manifest.containers[2]. For example, if the object reference is to a container within a pod, this would take on a value like: "spec.containers{name}" (where "name" refers to the name of the container that triggered the event) or if no container name is specified "spec.containers[2]" (container with index 2 in this pod). This syntax is chosen only to have some well-defined way of referencing a part of an object. TODO: this design is not final and this field is subject to change in the future.
"""
return pulumi.get(self, "field_path")
@field_path.setter
def field_path(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "field_path", value)
@property
@pulumi.getter
def kind(self) -> Optional[pulumi.Input[str]]:
"""
Kind of the referent. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
"""
return pulumi.get(self, "kind")
@kind.setter
def kind(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "kind", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def namespace(self) -> Optional[pulumi.Input[str]]:
"""
Namespace of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/namespaces/
"""
return pulumi.get(self, "namespace")
@namespace.setter
def namespace(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "namespace", value)
@property
@pulumi.getter(name="resourceVersion")
def resource_version(self) -> Optional[pulumi.Input[str]]:
"""
Specific resourceVersion to which this reference is made, if any. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#concurrency-control-and-consistency
"""
return pulumi.get(self, "resource_version")
@resource_version.setter
def resource_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_version", value)
@property
@pulumi.getter
def uid(self) -> Optional[pulumi.Input[str]]:
"""
UID of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#uids
"""
return pulumi.get(self, "uid")
@uid.setter
def uid(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "uid", value)
@pulumi.input_type
class HelmReleaseRepoSourceArgs:
def __init__(__self__, *,
github: Optional[pulumi.Input['HelmReleaseRepoSourceGithubArgs']] = None,
helm_repo: Optional[pulumi.Input['HelmReleaseRepoSourceHelmRepoArgs']] = None,
type: Optional[pulumi.Input[str]] = None):
"""
Source holds the URL toward the helm-chart
:param pulumi.Input['HelmReleaseRepoSourceGithubArgs'] github: GitHub provides the parameters to access the helm-chart located in a github repo
:param pulumi.Input['HelmReleaseRepoSourceHelmRepoArgs'] helm_repo: HelmRepo provides the urls to retrieve the helm-chart
:param pulumi.Input[str] type: SourceTypeEnum types of sources
"""
if github is not None:
pulumi.set(__self__, "github", github)
if helm_repo is not None:
pulumi.set(__self__, "helm_repo", helm_repo)
if type is not None:
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def github(self) -> Optional[pulumi.Input['HelmReleaseRepoSourceGithubArgs']]:
"""
GitHub provides the parameters to access the helm-chart located in a github repo
"""
return pulumi.get(self, "github")
@github.setter
def github(self, value: Optional[pulumi.Input['HelmReleaseRepoSourceGithubArgs']]):
pulumi.set(self, "github", value)
@property
@pulumi.getter(name="helmRepo")
def helm_repo(self) -> Optional[pulumi.Input['HelmReleaseRepoSourceHelmRepoArgs']]:
"""
HelmRepo provides the urls to retrieve the helm-chart
"""
return pulumi.get(self, "helm_repo")
@helm_repo.setter
def helm_repo(self, value: Optional[pulumi.Input['HelmReleaseRepoSourceHelmRepoArgs']]):
pulumi.set(self, "helm_repo", value)
@property
@pulumi.getter
def type(self) -> Optional[pulumi.Input[str]]:
"""
SourceTypeEnum types of sources
"""
return pulumi.get(self, "type")
@type.setter
def type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "type", value)
@pulumi.input_type
class HelmReleaseRepoSourceGithubArgs:
def __init__(__self__, *,
branch: Optional[pulumi.Input[str]] = None,
chart_path: Optional[pulumi.Input[str]] = None,
urls: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
"""
GitHub provides the parameters to access the helm-chart located in a github repo
"""
if branch is not None:
pulumi.set(__self__, "branch", branch)
if chart_path is not None:
pulumi.set(__self__, "chart_path", chart_path)
if urls is not None:
pulumi.set(__self__, "urls", urls)
@property
@pulumi.getter
def branch(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "branch")
@branch.setter
def branch(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "branch", value)
@property
@pulumi.getter(name="chartPath")
def chart_path(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "chart_path")
@chart_path.setter
def chart_path(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "chart_path", value)
@property
@pulumi.getter
def urls(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
return pulumi.get(self, "urls")
@urls.setter
def urls(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "urls", value)
@pulumi.input_type
class HelmReleaseRepoSourceHelmRepoArgs:
def __init__(__self__, *,
urls: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
"""
HelmRepo provides the urls to retrieve the helm-chart
"""
if urls is not None:
pulumi.set(__self__, "urls", urls)
@property
@pulumi.getter
def urls(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
return pulumi.get(self, "urls")
@urls.setter
def urls(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "urls", value)
@pulumi.input_type
class HelmReleaseStatusArgs:
def __init__(__self__, *,
conditions: pulumi.Input[Sequence[pulumi.Input['HelmReleaseStatusConditionsArgs']]],
deployed_release: Optional[pulumi.Input['HelmReleaseStatusDeployedReleaseArgs']] = None):
pulumi.set(__self__, "conditions", conditions)
if deployed_release is not None:
pulumi.set(__self__, "deployed_release", deployed_release)
@property
@pulumi.getter
def conditions(self) -> pulumi.Input[Sequence[pulumi.Input['HelmReleaseStatusConditionsArgs']]]:
return pulumi.get(self, "conditions")
@conditions.setter
def conditions(self, value: pulumi.Input[Sequence[pulumi.Input['HelmReleaseStatusConditionsArgs']]]):
pulumi.set(self, "conditions", value)
@property
@pulumi.getter(name="deployedRelease")
def deployed_release(self) -> Optional[pulumi.Input['HelmReleaseStatusDeployedReleaseArgs']]:
return pulumi.get(self, "deployed_release")
@deployed_release.setter
def deployed_release(self, value: Optional[pulumi.Input['HelmReleaseStatusDeployedReleaseArgs']]):
pulumi.set(self, "deployed_release", value)
@pulumi.input_type
class HelmReleaseStatusConditionsArgs:
def __init__(__self__, *,
status: pulumi.Input[str],
type: pulumi.Input[str],
last_transition_time: Optional[pulumi.Input[str]] = None,
message: Optional[pulumi.Input[str]] = None,
reason: Optional[pulumi.Input[str]] = None):
pulumi.set(__self__, "status", status)
pulumi.set(__self__, "type", type)
if last_transition_time is not None:
pulumi.set(__self__, "last_transition_time", last_transition_time)
if message is not None:
pulumi.set(__self__, "message", message)
if reason is not None:
pulumi.set(__self__, "reason", reason)
@property
@pulumi.getter
def status(self) -> pulumi.Input[str]:
return pulumi.get(self, "status")
@status.setter
def status(self, value: pulumi.Input[str]):
pulumi.set(self, "status", value)
@property
@pulumi.getter
def type(self) -> pulumi.Input[str]:
return pulumi.get(self, "type")
@type.setter
def type(self, value: pulumi.Input[str]):
pulumi.set(self, "type", value)
@property
@pulumi.getter(name="lastTransitionTime")
def last_transition_time(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "last_transition_time")
@last_transition_time.setter
def last_transition_time(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "last_transition_time", value)
@property
@pulumi.getter
def message(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "message")
@message.setter
def message(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "message", value)
@property
@pulumi.getter
def reason(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "reason")
@reason.setter
def reason(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "reason", value)
@pulumi.input_type
class HelmReleaseStatusDeployedReleaseArgs:
def __init__(__self__, *,
manifest: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None):
if manifest is not None:
pulumi.set(__self__, "manifest", manifest)
if name is not None:
pulumi.set(__self__, "name", name)
@property
@pulumi.getter
def manifest(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "manifest")
@manifest.setter
def manifest(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "manifest", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@pulumi.input_type
class PlacementRuleSpecArgs:
def __init__(__self__, *,
cluster_conditions: Optional[pulumi.Input[Sequence[pulumi.Input['PlacementRuleSpecClusterConditionsArgs']]]] = None,
cluster_replicas: Optional[pulumi.Input[int]] = None,
cluster_selector: Optional[pulumi.Input['PlacementRuleSpecClusterSelectorArgs']] = None,
clusters: Optional[pulumi.Input[Sequence[pulumi.Input['PlacementRuleSpecClustersArgs']]]] = None,
policies: Optional[pulumi.Input[Sequence[pulumi.Input['PlacementRuleSpecPoliciesArgs']]]] = None,
resource_hint: Optional[pulumi.Input['PlacementRuleSpecResourceHintArgs']] = None,
scheduler_name: Optional[pulumi.Input[str]] = None):
"""
PlacementRuleSpec defines the desired state of PlacementRule
:param pulumi.Input[int] cluster_replicas: number of cluster replicas the Application wants
:param pulumi.Input['PlacementRuleSpecClusterSelectorArgs'] cluster_selector: A label selector is a label query over a set of resources. The result of matchLabels and matchExpressions are ANDed. An empty label selector matches all objects. A null label selector matches no objects.
:param pulumi.Input[Sequence[pulumi.Input['PlacementRuleSpecPoliciesArgs']]] policies: Set Policy Filters
:param pulumi.Input['PlacementRuleSpecResourceHintArgs'] resource_hint: Select Resource
:param pulumi.Input[str] scheduler_name: schedulerName; defaults to the mcm controller
"""
if cluster_conditions is not None:
pulumi.set(__self__, "cluster_conditions", cluster_conditions)
if cluster_replicas is not None:
pulumi.set(__self__, "cluster_replicas", cluster_replicas)
if cluster_selector is not None:
pulumi.set(__self__, "cluster_selector", cluster_selector)
if clusters is not None:
pulumi.set(__self__, "clusters", clusters)
if policies is not None:
pulumi.set(__self__, "policies", policies)
if resource_hint is not None:
pulumi.set(__self__, "resource_hint", resource_hint)
if scheduler_name is not None:
pulumi.set(__self__, "scheduler_name", scheduler_name)
@property
@pulumi.getter(name="clusterConditions")
def cluster_conditions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['PlacementRuleSpecClusterConditionsArgs']]]]:
return pulumi.get(self, "cluster_conditions")
@cluster_conditions.setter
def cluster_conditions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['PlacementRuleSpecClusterConditionsArgs']]]]):
pulumi.set(self, "cluster_conditions", value)
@property
@pulumi.getter(name="clusterReplicas")
def cluster_replicas(self) -> Optional[pulumi.Input[int]]:
"""
number of cluster replicas the Application wants
"""
return pulumi.get(self, "cluster_replicas")
@cluster_replicas.setter
def cluster_replicas(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "cluster_replicas", value)
@property
@pulumi.getter(name="clusterSelector")
def cluster_selector(self) -> Optional[pulumi.Input['PlacementRuleSpecClusterSelectorArgs']]:
"""
A label selector is a label query over a set of resources. The result of matchLabels and matchExpressions are ANDed. An empty label selector matches all objects. A null label selector matches no objects.
"""
return pulumi.get(self, "cluster_selector")
@cluster_selector.setter
def cluster_selector(self, value: Optional[pulumi.Input['PlacementRuleSpecClusterSelectorArgs']]):
pulumi.set(self, "cluster_selector", value)
@property
@pulumi.getter
def clusters(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['PlacementRuleSpecClustersArgs']]]]:
return pulumi.get(self, "clusters")
@clusters.setter
def clusters(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['PlacementRuleSpecClustersArgs']]]]):
pulumi.set(self, "clusters", value)
@property
@pulumi.getter
def policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['PlacementRuleSpecPoliciesArgs']]]]:
"""
Set Policy Filters
"""
return pulumi.get(self, "policies")
@policies.setter
def policies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['PlacementRuleSpecPoliciesArgs']]]]):
pulumi.set(self, "policies", value)
@property
@pulumi.getter(name="resourceHint")
def resource_hint(self) -> Optional[pulumi.Input['PlacementRuleSpecResourceHintArgs']]:
"""
Select Resource
"""
return pulumi.get(self, "resource_hint")
@resource_hint.setter
def resource_hint(self, value: Optional[pulumi.Input['PlacementRuleSpecResourceHintArgs']]):
pulumi.set(self, "resource_hint", value)
@property
@pulumi.getter(name="schedulerName")
def scheduler_name(self) -> Optional[pulumi.Input[str]]:
"""
schedulerName; defaults to the mcm controller
"""
return pulumi.get(self, "scheduler_name")
@scheduler_name.setter
def scheduler_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "scheduler_name", value)
@pulumi.input_type
class PlacementRuleSpecClusterConditionsArgs:
def __init__(__self__, *,
status: Optional[pulumi.Input[str]] = None,
type: Optional[pulumi.Input[str]] = None):
"""
ClusterConditionFilter defines a filter on cluster conditions
"""
if status is not None:
pulumi.set(__self__, "status", status)
if type is not None:
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def status(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "status")
@status.setter
def status(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "status", value)
@property
@pulumi.getter
def type(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "type")
@type.setter
def type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "type", value)
@pulumi.input_type
class PlacementRuleSpecClusterSelectorArgs:
def __init__(__self__, *,
match_expressions: Optional[pulumi.Input[Sequence[pulumi.Input['PlacementRuleSpecClusterSelectorMatchExpressionsArgs']]]] = None,
match_labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
A label selector is a label query over a set of resources. The result of matchLabels and matchExpressions are ANDed. An empty label selector matches all objects. A null label selector matches no objects.
:param pulumi.Input[Sequence[pulumi.Input['PlacementRuleSpecClusterSelectorMatchExpressionsArgs']]] match_expressions: matchExpressions is a list of label selector requirements. The requirements are ANDed.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] match_labels: matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed.
"""
if match_expressions is not None:
pulumi.set(__self__, "match_expressions", match_expressions)
if match_labels is not None:
pulumi.set(__self__, "match_labels", match_labels)
@property
@pulumi.getter(name="matchExpressions")
def match_expressions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['PlacementRuleSpecClusterSelectorMatchExpressionsArgs']]]]:
"""
matchExpressions is a list of label selector requirements. The requirements are ANDed.
"""
return pulumi.get(self, "match_expressions")
@match_expressions.setter
def match_expressions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['PlacementRuleSpecClusterSelectorMatchExpressionsArgs']]]]):
pulumi.set(self, "match_expressions", value)
@property
@pulumi.getter(name="matchLabels")
def match_labels(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed.
"""
return pulumi.get(self, "match_labels")
@match_labels.setter
def match_labels(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "match_labels", value)
@pulumi.input_type
class PlacementRuleSpecClusterSelectorMatchExpressionsArgs:
def __init__(__self__, *,
key: pulumi.Input[str],
operator: pulumi.Input[str],
values: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
"""
A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values.
:param pulumi.Input[str] key: key is the label key that the selector applies to.
:param pulumi.Input[str] operator: operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist.
:param pulumi.Input[Sequence[pulumi.Input[str]]] values: values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch.
"""
pulumi.set(__self__, "key", key)
pulumi.set(__self__, "operator", operator)
if values is not None:
pulumi.set(__self__, "values", values)
@property
@pulumi.getter
def key(self) -> pulumi.Input[str]:
"""
key is the label key that the selector applies to.
"""
return pulumi.get(self, "key")
@key.setter
def key(self, value: pulumi.Input[str]):
pulumi.set(self, "key", value)
@property
@pulumi.getter
def operator(self) -> pulumi.Input[str]:
"""
operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist.
"""
return pulumi.get(self, "operator")
@operator.setter
def operator(self, value: pulumi.Input[str]):
pulumi.set(self, "operator", value)
@property
@pulumi.getter
def values(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch.
"""
return pulumi.get(self, "values")
@values.setter
def values(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "values", value)
@pulumi.input_type
class PlacementRuleSpecClustersArgs:
def __init__(__self__, *,
name: pulumi.Input[str]):
"""
GenericClusterReference - in alignment with kubefed
"""
pulumi.set(__self__, "name", name)
@property
@pulumi.getter
def name(self) -> pulumi.Input[str]:
return pulumi.get(self, "name")
@name.setter
def name(self, value: pulumi.Input[str]):
pulumi.set(self, "name", value)
@pulumi.input_type
class PlacementRuleSpecPoliciesArgs:
def __init__(__self__, *,
api_version: Optional[pulumi.Input[str]] = None,
field_path: Optional[pulumi.Input[str]] = None,
kind: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
namespace: Optional[pulumi.Input[str]] = None,
resource_version: Optional[pulumi.Input[str]] = None,
uid: Optional[pulumi.Input[str]] = None):
"""
ObjectReference contains enough information to let you inspect or modify the referred object. --- New uses of this type are discouraged because of difficulty describing its usage when embedded in APIs. 1. Ignored fields. It includes many fields which are not generally honored. For instance, ResourceVersion and FieldPath are both very rarely valid in actual usage. 2. Invalid usage help. It is impossible to add specific help for individual usage. In most embedded usages, there are particular restrictions like, "must refer only to types A and B" or "UID not honored" or "name must be restricted". Those cannot be well described when embedded. 3. Inconsistent validation. Because the usages are different, the validation rules are different by usage, which makes it hard for users to predict what will happen. 4. The fields are both imprecise and overly precise. Kind is not a precise mapping to a URL. This can produce ambiguity during interpretation and require a REST mapping. In most cases, the dependency is on the group,resource tuple and the version of the actual struct is irrelevant. 5. We cannot easily change it. Because this type is embedded in many locations, updates to this type will affect numerous schemas. Don't make new APIs embed an underspecified API type they do not control. Instead of using this type, create a locally provided and used type that is well-focused on your reference. For example, ServiceReferences for admission registration: https://github.com/kubernetes/api/blob/release-1.17/admissionregistration/v1/types.go#L533 .
:param pulumi.Input[str] api_version: API version of the referent.
:param pulumi.Input[str] field_path: If referring to a piece of an object instead of an entire object, this string should contain a valid JSON/Go field access statement, such as desiredState.manifest.containers[2]. For example, if the object reference is to a container within a pod, this would take on a value like: "spec.containers{name}" (where "name" refers to the name of the container that triggered the event) or if no container name is specified "spec.containers[2]" (container with index 2 in this pod). This syntax is chosen only to have some well-defined way of referencing a part of an object. TODO: this design is not final and this field is subject to change in the future.
:param pulumi.Input[str] kind: Kind of the referent. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
:param pulumi.Input[str] name: Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
:param pulumi.Input[str] namespace: Namespace of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/namespaces/
:param pulumi.Input[str] resource_version: Specific resourceVersion to which this reference is made, if any. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#concurrency-control-and-consistency
:param pulumi.Input[str] uid: UID of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#uids
"""
if api_version is not None:
pulumi.set(__self__, "api_version", api_version)
if field_path is not None:
pulumi.set(__self__, "field_path", field_path)
if kind is not None:
pulumi.set(__self__, "kind", kind)
if name is not None:
pulumi.set(__self__, "name", name)
if namespace is not None:
pulumi.set(__self__, "namespace", namespace)
if resource_version is not None:
pulumi.set(__self__, "resource_version", resource_version)
if uid is not None:
pulumi.set(__self__, "uid", uid)
@property
@pulumi.getter(name="apiVersion")
def api_version(self) -> Optional[pulumi.Input[str]]:
"""
API version of the referent.
"""
return pulumi.get(self, "api_version")
@api_version.setter
def api_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "api_version", value)
@property
@pulumi.getter(name="fieldPath")
def field_path(self) -> Optional[pulumi.Input[str]]:
"""
If referring to a piece of an object instead of an entire object, this string should contain a valid JSON/Go field access statement, such as desiredState.manifest.containers[2]. For example, if the object reference is to a container within a pod, this would take on a value like: "spec.containers{name}" (where "name" refers to the name of the container that triggered the event) or if no container name is specified "spec.containers[2]" (container with index 2 in this pod). This syntax is chosen only to have some well-defined way of referencing a part of an object. TODO: this design is not final and this field is subject to change in the future.
"""
return pulumi.get(self, "field_path")
@field_path.setter
def field_path(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "field_path", value)
@property
@pulumi.getter
def kind(self) -> Optional[pulumi.Input[str]]:
"""
Kind of the referent. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
"""
return pulumi.get(self, "kind")
@kind.setter
def kind(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "kind", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def namespace(self) -> Optional[pulumi.Input[str]]:
"""
Namespace of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/namespaces/
"""
return pulumi.get(self, "namespace")
@namespace.setter
def namespace(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "namespace", value)
@property
@pulumi.getter(name="resourceVersion")
def resource_version(self) -> Optional[pulumi.Input[str]]:
"""
Specific resourceVersion to which this reference is made, if any. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#concurrency-control-and-consistency
"""
return pulumi.get(self, "resource_version")
@resource_version.setter
def resource_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_version", value)
@property
@pulumi.getter
def uid(self) -> Optional[pulumi.Input[str]]:
"""
UID of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#uids
"""
return pulumi.get(self, "uid")
@uid.setter
def uid(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "uid", value)
@pulumi.input_type
class PlacementRuleSpecResourceHintArgs:
def __init__(__self__, *,
order: Optional[pulumi.Input[str]] = None,
type: Optional[pulumi.Input[str]] = None):
"""
Select Resource
:param pulumi.Input[str] order: SelectionOrder is the type for Nodes
:param pulumi.Input[str] type: ResourceType defines the types that can be sorted
"""
if order is not None:
pulumi.set(__self__, "order", order)
if type is not None:
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def order(self) -> Optional[pulumi.Input[str]]:
"""
SelectionOrder is the type for Nodes
"""
return pulumi.get(self, "order")
@order.setter
def order(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "order", value)
@property
@pulumi.getter
def type(self) -> Optional[pulumi.Input[str]]:
"""
ResourceType defines the types that can be sorted
"""
return pulumi.get(self, "type")
@type.setter
def type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "type", value)
@pulumi.input_type
class PlacementRuleStatusArgs:
def __init__(__self__, *,
decisions: Optional[pulumi.Input[Sequence[pulumi.Input['PlacementRuleStatusDecisionsArgs']]]] = None):
"""
PlacementRuleStatus defines the observed state of PlacementRule
:param pulumi.Input[Sequence[pulumi.Input['PlacementRuleStatusDecisionsArgs']]] decisions: the placement decisions made by the controller
"""
if decisions is not None:
pulumi.set(__self__, "decisions", decisions)
@property
@pulumi.getter
def decisions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['PlacementRuleStatusDecisionsArgs']]]]:
"""
the placement decisions made by the controller
"""
return pulumi.get(self, "decisions")
@decisions.setter
def decisions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['PlacementRuleStatusDecisionsArgs']]]]):
pulumi.set(self, "decisions", value)
@pulumi.input_type
class PlacementRuleStatusDecisionsArgs:
def __init__(__self__, *,
cluster_name: Optional[pulumi.Input[str]] = None,
cluster_namespace: Optional[pulumi.Input[str]] = None):
"""
PlacementDecision defines the decision made by controller
"""
if cluster_name is not None:
pulumi.set(__self__, "cluster_name", cluster_name)
if cluster_namespace is not None:
pulumi.set(__self__, "cluster_namespace", cluster_namespace)
@property
@pulumi.getter(name="clusterName")
def cluster_name(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "cluster_name")
@cluster_name.setter
def cluster_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cluster_name", value)
@property
@pulumi.getter(name="clusterNamespace")
def cluster_namespace(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "cluster_namespace")
@cluster_namespace.setter
def cluster_namespace(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cluster_namespace", value)
@pulumi.input_type
class SubscriptionSpecArgs:
def __init__(__self__, *,
channel: pulumi.Input[str],
hooksecretref: Optional[pulumi.Input['SubscriptionSpecHooksecretrefArgs']] = None,
name: Optional[pulumi.Input[str]] = None,
overrides: Optional[pulumi.Input[Sequence[pulumi.Input['SubscriptionSpecOverridesArgs']]]] = None,
package_filter: Optional[pulumi.Input['SubscriptionSpecPackageFilterArgs']] = None,
package_overrides: Optional[pulumi.Input[Sequence[pulumi.Input['SubscriptionSpecPackageOverridesArgs']]]] = None,
placement: Optional[pulumi.Input['SubscriptionSpecPlacementArgs']] = None,
timewindow: Optional[pulumi.Input['SubscriptionSpecTimewindowArgs']] = None):
"""
SubscriptionSpec defines the desired state of Subscription
:param pulumi.Input['SubscriptionSpecHooksecretrefArgs'] hooksecretref: ObjectReference contains enough information to let you inspect or modify the referred object. --- New uses of this type are discouraged because of difficulty describing its usage when embedded in APIs. 1. Ignored fields. It includes many fields which are not generally honored. For instance, ResourceVersion and FieldPath are both very rarely valid in actual usage. 2. Invalid usage help. It is impossible to add specific help for individual usage. In most embedded usages, there are particular restrictions like, "must refer only to types A and B" or "UID not honored" or "name must be restricted". Those cannot be well described when embedded. 3. Inconsistent validation. Because the usages are different, the validation rules are different by usage, which makes it hard for users to predict what will happen. 4. The fields are both imprecise and overly precise. Kind is not a precise mapping to a URL. This can produce ambiguity during interpretation and require a REST mapping. In most cases, the dependency is on the group,resource tuple and the version of the actual struct is irrelevant. 5. We cannot easily change it. Because this type is embedded in many locations, updates to this type will affect numerous schemas. Don't make new APIs embed an underspecified API type they do not control. Instead of using this type, create a locally provided and used type that is well-focused on your reference. For example, ServiceReferences for admission registration: https://github.com/kubernetes/api/blob/release-1.17/admissionregistration/v1/types.go#L533 .
:param pulumi.Input[str] name: To specify 1 package in channel
:param pulumi.Input[Sequence[pulumi.Input['SubscriptionSpecOverridesArgs']]] overrides: for hub use only; specifies the overrides to apply when deploying to clusters
:param pulumi.Input['SubscriptionSpecPackageFilterArgs'] package_filter: To specify more than 1 package in channel
:param pulumi.Input[Sequence[pulumi.Input['SubscriptionSpecPackageOverridesArgs']]] package_overrides: To provide flexibility to override package in channel with local input
:param pulumi.Input['SubscriptionSpecPlacementArgs'] placement: For hub use only, to specify which clusters to go to
:param pulumi.Input['SubscriptionSpecTimewindowArgs'] timewindow: helps the user control when the subscription will take effect
"""
pulumi.set(__self__, "channel", channel)
if hooksecretref is not None:
pulumi.set(__self__, "hooksecretref", hooksecretref)
if name is not None:
pulumi.set(__self__, "name", name)
if overrides is not None:
pulumi.set(__self__, "overrides", overrides)
if package_filter is not None:
pulumi.set(__self__, "package_filter", package_filter)
if package_overrides is not None:
pulumi.set(__self__, "package_overrides", package_overrides)
if placement is not None:
pulumi.set(__self__, "placement", placement)
if timewindow is not None:
pulumi.set(__self__, "timewindow", timewindow)
@property
@pulumi.getter
def channel(self) -> pulumi.Input[str]:
return pulumi.get(self, "channel")
@channel.setter
def channel(self, value: pulumi.Input[str]):
pulumi.set(self, "channel", value)
@property
@pulumi.getter
def hooksecretref(self) -> Optional[pulumi.Input['SubscriptionSpecHooksecretrefArgs']]:
"""
ObjectReference contains enough information to let you inspect or modify the referred object. --- New uses of this type are discouraged because of difficulty describing its usage when embedded in APIs. 1. Ignored fields. It includes many fields which are not generally honored. For instance, ResourceVersion and FieldPath are both very rarely valid in actual usage. 2. Invalid usage help. It is impossible to add specific help for individual usage. In most embedded usages, there are particular restrictions like, "must refer only to types A and B" or "UID not honored" or "name must be restricted". Those cannot be well described when embedded. 3. Inconsistent validation. Because the usages are different, the validation rules are different by usage, which makes it hard for users to predict what will happen. 4. The fields are both imprecise and overly precise. Kind is not a precise mapping to a URL. This can produce ambiguity during interpretation and require a REST mapping. In most cases, the dependency is on the group,resource tuple and the version of the actual struct is irrelevant. 5. We cannot easily change it. Because this type is embedded in many locations, updates to this type will affect numerous schemas. Don't make new APIs embed an underspecified API type they do not control. Instead of using this type, create a locally provided and used type that is well-focused on your reference. For example, ServiceReferences for admission registration: https://github.com/kubernetes/api/blob/release-1.17/admissionregistration/v1/types.go#L533 .
"""
return pulumi.get(self, "hooksecretref")
@hooksecretref.setter
def hooksecretref(self, value: Optional[pulumi.Input['SubscriptionSpecHooksecretrefArgs']]):
pulumi.set(self, "hooksecretref", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
To specify 1 package in channel
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def overrides(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['SubscriptionSpecOverridesArgs']]]]:
"""
for hub use only; specifies the overrides to apply when deploying to clusters
"""
return pulumi.get(self, "overrides")
@overrides.setter
def overrides(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['SubscriptionSpecOverridesArgs']]]]):
pulumi.set(self, "overrides", value)
@property
@pulumi.getter(name="packageFilter")
def package_filter(self) -> Optional[pulumi.Input['SubscriptionSpecPackageFilterArgs']]:
"""
To specify more than 1 package in channel
"""
return pulumi.get(self, "package_filter")
@package_filter.setter
def package_filter(self, value: Optional[pulumi.Input['SubscriptionSpecPackageFilterArgs']]):
pulumi.set(self, "package_filter", value)
@property
@pulumi.getter(name="packageOverrides")
def package_overrides(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['SubscriptionSpecPackageOverridesArgs']]]]:
"""
To provide flexibility to override package in channel with local input
"""
return pulumi.get(self, "package_overrides")
@package_overrides.setter
def package_overrides(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['SubscriptionSpecPackageOverridesArgs']]]]):
pulumi.set(self, "package_overrides", value)
@property
@pulumi.getter
def placement(self) -> Optional[pulumi.Input['SubscriptionSpecPlacementArgs']]:
"""
For hub use only, to specify which clusters to go to
"""
return pulumi.get(self, "placement")
@placement.setter
def placement(self, value: Optional[pulumi.Input['SubscriptionSpecPlacementArgs']]):
pulumi.set(self, "placement", value)
@property
@pulumi.getter
def timewindow(self) -> Optional[pulumi.Input['SubscriptionSpecTimewindowArgs']]:
"""
helps the user control when the subscription will take effect
"""
return pulumi.get(self, "timewindow")
@timewindow.setter
def timewindow(self, value: Optional[pulumi.Input['SubscriptionSpecTimewindowArgs']]):
pulumi.set(self, "timewindow", value)
@pulumi.input_type
class SubscriptionSpecHooksecretrefArgs:
def __init__(__self__, *,
api_version: Optional[pulumi.Input[str]] = None,
field_path: Optional[pulumi.Input[str]] = None,
kind: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
namespace: Optional[pulumi.Input[str]] = None,
resource_version: Optional[pulumi.Input[str]] = None,
uid: Optional[pulumi.Input[str]] = None):
"""
ObjectReference contains enough information to let you inspect or modify the referred object. --- New uses of this type are discouraged because of difficulty describing its usage when embedded in APIs. 1. Ignored fields. It includes many fields which are not generally honored. For instance, ResourceVersion and FieldPath are both very rarely valid in actual usage. 2. Invalid usage help. It is impossible to add specific help for individual usage. In most embedded usages, there are particular restrictions like, "must refer only to types A and B" or "UID not honored" or "name must be restricted". Those cannot be well described when embedded. 3. Inconsistent validation. Because the usages are different, the validation rules are different by usage, which makes it hard for users to predict what will happen. 4. The fields are both imprecise and overly precise. Kind is not a precise mapping to a URL. This can produce ambiguity during interpretation and require a REST mapping. In most cases, the dependency is on the group,resource tuple and the version of the actual struct is irrelevant. 5. We cannot easily change it. Because this type is embedded in many locations, updates to this type will affect numerous schemas. Don't make new APIs embed an underspecified API type they do not control. Instead of using this type, create a locally provided and used type that is well-focused on your reference. For example, ServiceReferences for admission registration: https://github.com/kubernetes/api/blob/release-1.17/admissionregistration/v1/types.go#L533 .
:param pulumi.Input[str] api_version: API version of the referent.
:param pulumi.Input[str] field_path: If referring to a piece of an object instead of an entire object, this string should contain a valid JSON/Go field access statement, such as desiredState.manifest.containers[2]. For example, if the object reference is to a container within a pod, this would take on a value like: "spec.containers{name}" (where "name" refers to the name of the container that triggered the event) or if no container name is specified "spec.containers[2]" (container with index 2 in this pod). This syntax is chosen only to have some well-defined way of referencing a part of an object. TODO: this design is not final and this field is subject to change in the future.
:param pulumi.Input[str] kind: Kind of the referent. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
:param pulumi.Input[str] name: Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
:param pulumi.Input[str] namespace: Namespace of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/namespaces/
:param pulumi.Input[str] resource_version: Specific resourceVersion to which this reference is made, if any. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#concurrency-control-and-consistency
:param pulumi.Input[str] uid: UID of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#uids
"""
if api_version is not None:
pulumi.set(__self__, "api_version", api_version)
if field_path is not None:
pulumi.set(__self__, "field_path", field_path)
if kind is not None:
pulumi.set(__self__, "kind", kind)
if name is not None:
pulumi.set(__self__, "name", name)
if namespace is not None:
pulumi.set(__self__, "namespace", namespace)
if resource_version is not None:
pulumi.set(__self__, "resource_version", resource_version)
if uid is not None:
pulumi.set(__self__, "uid", uid)
@property
@pulumi.getter(name="apiVersion")
def api_version(self) -> Optional[pulumi.Input[str]]:
"""
API version of the referent.
"""
return pulumi.get(self, "api_version")
@api_version.setter
def api_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "api_version", value)
@property
@pulumi.getter(name="fieldPath")
def field_path(self) -> Optional[pulumi.Input[str]]:
"""
If referring to a piece of an object instead of an entire object, this string should contain a valid JSON/Go field access statement, such as desiredState.manifest.containers[2]. For example, if the object reference is to a container within a pod, this would take on a value like: "spec.containers{name}" (where "name" refers to the name of the container that triggered the event) or if no container name is specified "spec.containers[2]" (container with index 2 in this pod). This syntax is chosen only to have some well-defined way of referencing a part of an object. TODO: this design is not final and this field is subject to change in the future.
"""
return pulumi.get(self, "field_path")
@field_path.setter
def field_path(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "field_path", value)
@property
@pulumi.getter
def kind(self) -> Optional[pulumi.Input[str]]:
"""
Kind of the referent. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
"""
return pulumi.get(self, "kind")
@kind.setter
def kind(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "kind", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def namespace(self) -> Optional[pulumi.Input[str]]:
"""
Namespace of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/namespaces/
"""
return pulumi.get(self, "namespace")
@namespace.setter
def namespace(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "namespace", value)
@property
@pulumi.getter(name="resourceVersion")
def resource_version(self) -> Optional[pulumi.Input[str]]:
"""
Specific resourceVersion to which this reference is made, if any. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#concurrency-control-and-consistency
"""
return pulumi.get(self, "resource_version")
@resource_version.setter
def resource_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_version", value)
@property
@pulumi.getter
def uid(self) -> Optional[pulumi.Input[str]]:
"""
UID of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#uids
"""
return pulumi.get(self, "uid")
@uid.setter
def uid(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "uid", value)
@pulumi.input_type
class SubscriptionSpecOverridesArgs:
def __init__(__self__, *,
cluster_name: pulumi.Input[str],
cluster_overrides: pulumi.Input[Sequence[pulumi.Input[Mapping[str, Any]]]]):
"""
Overrides field in deployable
"""
pulumi.set(__self__, "cluster_name", cluster_name)
pulumi.set(__self__, "cluster_overrides", cluster_overrides)
@property
@pulumi.getter(name="clusterName")
def cluster_name(self) -> pulumi.Input[str]:
return pulumi.get(self, "cluster_name")
@cluster_name.setter
def cluster_name(self, value: pulumi.Input[str]):
pulumi.set(self, "cluster_name", value)
@property
@pulumi.getter(name="clusterOverrides")
def cluster_overrides(self) -> pulumi.Input[Sequence[pulumi.Input[Mapping[str, Any]]]]:
return pulumi.get(self, "cluster_overrides")
@cluster_overrides.setter
def cluster_overrides(self, value: pulumi.Input[Sequence[pulumi.Input[Mapping[str, Any]]]]):
pulumi.set(self, "cluster_overrides", value)
@pulumi.input_type
class SubscriptionSpecPackageFilterArgs:
def __init__(__self__, *,
annotations: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
filter_ref: Optional[pulumi.Input['SubscriptionSpecPackageFilterFilterRefArgs']] = None,
label_selector: Optional[pulumi.Input['SubscriptionSpecPackageFilterLabelSelectorArgs']] = None,
version: Optional[pulumi.Input[str]] = None):
"""
To specify more than 1 package in channel
:param pulumi.Input['SubscriptionSpecPackageFilterFilterRefArgs'] filter_ref: LocalObjectReference contains enough information to let you locate the referenced object inside the same namespace.
:param pulumi.Input['SubscriptionSpecPackageFilterLabelSelectorArgs'] label_selector: A label selector is a label query over a set of resources. The result of matchLabels and matchExpressions are ANDed. An empty label selector matches all objects. A null label selector matches no objects.
"""
if annotations is not None:
pulumi.set(__self__, "annotations", annotations)
if filter_ref is not None:
pulumi.set(__self__, "filter_ref", filter_ref)
if label_selector is not None:
pulumi.set(__self__, "label_selector", label_selector)
if version is not None:
pulumi.set(__self__, "version", version)
@property
@pulumi.getter
def annotations(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
return pulumi.get(self, "annotations")
@annotations.setter
def annotations(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "annotations", value)
@property
@pulumi.getter(name="filterRef")
def filter_ref(self) -> Optional[pulumi.Input['SubscriptionSpecPackageFilterFilterRefArgs']]:
"""
LocalObjectReference contains enough information to let you locate the referenced object inside the same namespace.
"""
return pulumi.get(self, "filter_ref")
@filter_ref.setter
def filter_ref(self, value: Optional[pulumi.Input['SubscriptionSpecPackageFilterFilterRefArgs']]):
pulumi.set(self, "filter_ref", value)
@property
@pulumi.getter(name="labelSelector")
def label_selector(self) -> Optional[pulumi.Input['SubscriptionSpecPackageFilterLabelSelectorArgs']]:
"""
A label selector is a label query over a set of resources. The result of matchLabels and matchExpressions are ANDed. An empty label selector matches all objects. A null label selector matches no objects.
"""
return pulumi.get(self, "label_selector")
@label_selector.setter
def label_selector(self, value: Optional[pulumi.Input['SubscriptionSpecPackageFilterLabelSelectorArgs']]):
pulumi.set(self, "label_selector", value)
@property
@pulumi.getter
def version(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "version")
@version.setter
def version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "version", value)
@pulumi.input_type
class SubscriptionSpecPackageFilterFilterRefArgs:
def __init__(__self__, *,
name: Optional[pulumi.Input[str]] = None):
"""
LocalObjectReference contains enough information to let you locate the referenced object inside the same namespace.
:param pulumi.Input[str] name: Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names TODO: Add other useful fields. apiVersion, kind, uid?
"""
if name is not None:
pulumi.set(__self__, "name", name)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names TODO: Add other useful fields. apiVersion, kind, uid?
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@pulumi.input_type
class SubscriptionSpecPackageFilterLabelSelectorArgs:
def __init__(__self__, *,
match_expressions: Optional[pulumi.Input[Sequence[pulumi.Input['SubscriptionSpecPackageFilterLabelSelectorMatchExpressionsArgs']]]] = None,
match_labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
A label selector is a label query over a set of resources. The result of matchLabels and matchExpressions are ANDed. An empty label selector matches all objects. A null label selector matches no objects.
:param pulumi.Input[Sequence[pulumi.Input['SubscriptionSpecPackageFilterLabelSelectorMatchExpressionsArgs']]] match_expressions: matchExpressions is a list of label selector requirements. The requirements are ANDed.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] match_labels: matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed.
"""
if match_expressions is not None:
pulumi.set(__self__, "match_expressions", match_expressions)
if match_labels is not None:
pulumi.set(__self__, "match_labels", match_labels)
@property
@pulumi.getter(name="matchExpressions")
def match_expressions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['SubscriptionSpecPackageFilterLabelSelectorMatchExpressionsArgs']]]]:
"""
matchExpressions is a list of label selector requirements. The requirements are ANDed.
"""
return pulumi.get(self, "match_expressions")
@match_expressions.setter
def match_expressions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['SubscriptionSpecPackageFilterLabelSelectorMatchExpressionsArgs']]]]):
pulumi.set(self, "match_expressions", value)
@property
@pulumi.getter(name="matchLabels")
def match_labels(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed.
"""
return pulumi.get(self, "match_labels")
@match_labels.setter
def match_labels(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "match_labels", value)
@pulumi.input_type
class SubscriptionSpecPackageFilterLabelSelectorMatchExpressionsArgs:
def __init__(__self__, *,
key: pulumi.Input[str],
operator: pulumi.Input[str],
values: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
"""
A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values.
:param pulumi.Input[str] key: key is the label key that the selector applies to.
:param pulumi.Input[str] operator: operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist.
:param pulumi.Input[Sequence[pulumi.Input[str]]] values: values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch.
"""
pulumi.set(__self__, "key", key)
pulumi.set(__self__, "operator", operator)
if values is not None:
pulumi.set(__self__, "values", values)
@property
@pulumi.getter
def key(self) -> pulumi.Input[str]:
"""
key is the label key that the selector applies to.
"""
return pulumi.get(self, "key")
@key.setter
def key(self, value: pulumi.Input[str]):
pulumi.set(self, "key", value)
@property
@pulumi.getter
def operator(self) -> pulumi.Input[str]:
"""
operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist.
"""
return pulumi.get(self, "operator")
@operator.setter
def operator(self, value: pulumi.Input[str]):
pulumi.set(self, "operator", value)
@property
@pulumi.getter
def values(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch.
"""
return pulumi.get(self, "values")
@values.setter
def values(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "values", value)
@pulumi.input_type
class SubscriptionSpecPackageOverridesArgs:
def __init__(__self__, *,
package_name: pulumi.Input[str],
package_alias: Optional[pulumi.Input[str]] = None,
package_overrides: Optional[pulumi.Input[Sequence[pulumi.Input[Mapping[str, Any]]]]] = None):
"""
Overrides field in deployable
"""
pulumi.set(__self__, "package_name", package_name)
if package_alias is not None:
pulumi.set(__self__, "package_alias", package_alias)
if package_overrides is not None:
pulumi.set(__self__, "package_overrides", package_overrides)
@property
@pulumi.getter(name="packageName")
def package_name(self) -> pulumi.Input[str]:
return pulumi.get(self, "package_name")
@package_name.setter
def package_name(self, value: pulumi.Input[str]):
pulumi.set(self, "package_name", value)
@property
@pulumi.getter(name="packageAlias")
def package_alias(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "package_alias")
@package_alias.setter
def package_alias(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "package_alias", value)
@property
@pulumi.getter(name="packageOverrides")
def package_overrides(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[Mapping[str, Any]]]]]:
return pulumi.get(self, "package_overrides")
@package_overrides.setter
def package_overrides(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[Mapping[str, Any]]]]]):
pulumi.set(self, "package_overrides", value)
@pulumi.input_type
class SubscriptionSpecPlacementArgs:
def __init__(__self__, *,
cluster_selector: Optional[pulumi.Input['SubscriptionSpecPlacementClusterSelectorArgs']] = None,
clusters: Optional[pulumi.Input[Sequence[pulumi.Input['SubscriptionSpecPlacementClustersArgs']]]] = None,
local: Optional[pulumi.Input[bool]] = None,
placement_ref: Optional[pulumi.Input['SubscriptionSpecPlacementPlacementRefArgs']] = None):
"""
For hub use only, to specify which clusters to go to
:param pulumi.Input['SubscriptionSpecPlacementClusterSelectorArgs'] cluster_selector: A label selector is a label query over a set of resources. The result of matchLabels and matchExpressions are ANDed. An empty label selector matches all objects. A null label selector matches no objects.
:param pulumi.Input['SubscriptionSpecPlacementPlacementRefArgs'] placement_ref: ObjectReference contains enough information to let you inspect or modify the referred object.
"""
if cluster_selector is not None:
pulumi.set(__self__, "cluster_selector", cluster_selector)
if clusters is not None:
pulumi.set(__self__, "clusters", clusters)
if local is not None:
pulumi.set(__self__, "local", local)
if placement_ref is not None:
pulumi.set(__self__, "placement_ref", placement_ref)
@property
@pulumi.getter(name="clusterSelector")
def cluster_selector(self) -> Optional[pulumi.Input['SubscriptionSpecPlacementClusterSelectorArgs']]:
"""
A label selector is a label query over a set of resources. The result of matchLabels and matchExpressions are ANDed. An empty label selector matches all objects. A null label selector matches no objects.
"""
return pulumi.get(self, "cluster_selector")
@cluster_selector.setter
def cluster_selector(self, value: Optional[pulumi.Input['SubscriptionSpecPlacementClusterSelectorArgs']]):
pulumi.set(self, "cluster_selector", value)
@property
@pulumi.getter
def clusters(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['SubscriptionSpecPlacementClustersArgs']]]]:
return pulumi.get(self, "clusters")
@clusters.setter
def clusters(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['SubscriptionSpecPlacementClustersArgs']]]]):
pulumi.set(self, "clusters", value)
@property
@pulumi.getter
def local(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "local")
@local.setter
def local(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "local", value)
@property
@pulumi.getter(name="placementRef")
def placement_ref(self) -> Optional[pulumi.Input['SubscriptionSpecPlacementPlacementRefArgs']]:
"""
ObjectReference contains enough information to let you inspect or modify the referred object.
"""
return pulumi.get(self, "placement_ref")
@placement_ref.setter
def placement_ref(self, value: Optional[pulumi.Input['SubscriptionSpecPlacementPlacementRefArgs']]):
pulumi.set(self, "placement_ref", value)
@pulumi.input_type
class SubscriptionSpecPlacementClusterSelectorArgs:
def __init__(__self__, *,
match_expressions: Optional[pulumi.Input[Sequence[pulumi.Input['SubscriptionSpecPlacementClusterSelectorMatchExpressionsArgs']]]] = None,
match_labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
A label selector is a label query over a set of resources. The result of matchLabels and matchExpressions are ANDed. An empty label selector matches all objects. A null label selector matches no objects.
:param pulumi.Input[Sequence[pulumi.Input['SubscriptionSpecPlacementClusterSelectorMatchExpressionsArgs']]] match_expressions: matchExpressions is a list of label selector requirements. The requirements are ANDed.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] match_labels: matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed.
"""
if match_expressions is not None:
pulumi.set(__self__, "match_expressions", match_expressions)
if match_labels is not None:
pulumi.set(__self__, "match_labels", match_labels)
@property
@pulumi.getter(name="matchExpressions")
def match_expressions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['SubscriptionSpecPlacementClusterSelectorMatchExpressionsArgs']]]]:
"""
matchExpressions is a list of label selector requirements. The requirements are ANDed.
"""
return pulumi.get(self, "match_expressions")
@match_expressions.setter
def match_expressions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['SubscriptionSpecPlacementClusterSelectorMatchExpressionsArgs']]]]):
pulumi.set(self, "match_expressions", value)
@property
@pulumi.getter(name="matchLabels")
def match_labels(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed.
"""
return pulumi.get(self, "match_labels")
@match_labels.setter
def match_labels(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "match_labels", value)
@pulumi.input_type
class SubscriptionSpecPlacementClusterSelectorMatchExpressionsArgs:
def __init__(__self__, *,
key: pulumi.Input[str],
operator: pulumi.Input[str],
values: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
"""
A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values.
:param pulumi.Input[str] key: key is the label key that the selector applies to.
:param pulumi.Input[str] operator: operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist.
:param pulumi.Input[Sequence[pulumi.Input[str]]] values: values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch.
"""
pulumi.set(__self__, "key", key)
pulumi.set(__self__, "operator", operator)
if values is not None:
pulumi.set(__self__, "values", values)
@property
@pulumi.getter
def key(self) -> pulumi.Input[str]:
"""
key is the label key that the selector applies to.
"""
return pulumi.get(self, "key")
@key.setter
def key(self, value: pulumi.Input[str]):
pulumi.set(self, "key", value)
@property
@pulumi.getter
def operator(self) -> pulumi.Input[str]:
"""
operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist.
"""
return pulumi.get(self, "operator")
@operator.setter
def operator(self, value: pulumi.Input[str]):
pulumi.set(self, "operator", value)
@property
@pulumi.getter
def values(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch.
"""
return pulumi.get(self, "values")
@values.setter
def values(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "values", value)
@pulumi.input_type
class SubscriptionSpecPlacementClustersArgs:
def __init__(__self__, *,
name: pulumi.Input[str]):
"""
GenericClusterReference - in alignment with kubefed
"""
pulumi.set(__self__, "name", name)
@property
@pulumi.getter
def name(self) -> pulumi.Input[str]:
return pulumi.get(self, "name")
@name.setter
def name(self, value: pulumi.Input[str]):
pulumi.set(self, "name", value)
@pulumi.input_type
class SubscriptionSpecPlacementPlacementRefArgs:
def __init__(__self__, *,
api_version: Optional[pulumi.Input[str]] = None,
field_path: Optional[pulumi.Input[str]] = None,
kind: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
namespace: Optional[pulumi.Input[str]] = None,
resource_version: Optional[pulumi.Input[str]] = None,
uid: Optional[pulumi.Input[str]] = None):
"""
ObjectReference contains enough information to let you inspect or modify the referred object.
:param pulumi.Input[str] api_version: API version of the referent.
:param pulumi.Input[str] field_path: If referring to a piece of an object instead of an entire object, this string should contain a valid JSON/Go field access statement, such as desiredState.manifest.containers[2]. For example, if the object reference is to a container within a pod, this would take on a value like: "spec.containers{name}" (where "name" refers to the name of the container that triggered the event) or if no container name is specified "spec.containers[2]" (container with index 2 in this pod). This syntax is chosen only to have some well-defined way of referencing a part of an object. TODO: this design is not final and this field is subject to change in the future.
:param pulumi.Input[str] kind: Kind of the referent. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
:param pulumi.Input[str] name: Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
:param pulumi.Input[str] namespace: Namespace of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/namespaces/
:param pulumi.Input[str] resource_version: Specific resourceVersion to which this reference is made, if any. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#concurrency-control-and-consistency
:param pulumi.Input[str] uid: UID of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#uids
"""
if api_version is not None:
pulumi.set(__self__, "api_version", api_version)
if field_path is not None:
pulumi.set(__self__, "field_path", field_path)
if kind is not None:
pulumi.set(__self__, "kind", kind)
if name is not None:
pulumi.set(__self__, "name", name)
if namespace is not None:
pulumi.set(__self__, "namespace", namespace)
if resource_version is not None:
pulumi.set(__self__, "resource_version", resource_version)
if uid is not None:
pulumi.set(__self__, "uid", uid)
@property
@pulumi.getter(name="apiVersion")
def api_version(self) -> Optional[pulumi.Input[str]]:
"""
API version of the referent.
"""
return pulumi.get(self, "api_version")
@api_version.setter
def api_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "api_version", value)
@property
@pulumi.getter(name="fieldPath")
def field_path(self) -> Optional[pulumi.Input[str]]:
"""
If referring to a piece of an object instead of an entire object, this string should contain a valid JSON/Go field access statement, such as desiredState.manifest.containers[2]. For example, if the object reference is to a container within a pod, this would take on a value like: "spec.containers{name}" (where "name" refers to the name of the container that triggered the event) or if no container name is specified "spec.containers[2]" (container with index 2 in this pod). This syntax is chosen only to have some well-defined way of referencing a part of an object. TODO: this design is not final and this field is subject to change in the future.
"""
return pulumi.get(self, "field_path")
@field_path.setter
def field_path(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "field_path", value)
@property
@pulumi.getter
def kind(self) -> Optional[pulumi.Input[str]]:
"""
Kind of the referent. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
"""
return pulumi.get(self, "kind")
@kind.setter
def kind(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "kind", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def namespace(self) -> Optional[pulumi.Input[str]]:
"""
Namespace of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/namespaces/
"""
return pulumi.get(self, "namespace")
@namespace.setter
def namespace(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "namespace", value)
@property
@pulumi.getter(name="resourceVersion")
def resource_version(self) -> Optional[pulumi.Input[str]]:
"""
Specific resourceVersion to which this reference is made, if any. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#concurrency-control-and-consistency
"""
return pulumi.get(self, "resource_version")
@resource_version.setter
def resource_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_version", value)
@property
@pulumi.getter
def uid(self) -> Optional[pulumi.Input[str]]:
"""
UID of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#uids
"""
return pulumi.get(self, "uid")
@uid.setter
def uid(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "uid", value)
@pulumi.input_type
class SubscriptionSpecTimewindowArgs:
def __init__(__self__, *,
daysofweek: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
hours: Optional[pulumi.Input[Sequence[pulumi.Input['SubscriptionSpecTimewindowHoursArgs']]]] = None,
location: Optional[pulumi.Input[str]] = None,
windowtype: Optional[pulumi.Input[str]] = None):
"""
Helps the user control when the subscription will take effect
:param pulumi.Input[Sequence[pulumi.Input[str]]] daysofweek: daysofweek defines the days of the week for this time window https://golang.org/pkg/time/#Weekday
:param pulumi.Input[str] location: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones
:param pulumi.Input[str] windowtype: whether the time window is active or blocked; if the time window is active, then deployments are applied only during these windows. Note: to generate the CRD with operator-sdk v0.10.0, the following line should be: <+kubebuilder:validation:Enum=active,blocked,Active,Blocked>
"""
if daysofweek is not None:
pulumi.set(__self__, "daysofweek", daysofweek)
if hours is not None:
pulumi.set(__self__, "hours", hours)
if location is not None:
pulumi.set(__self__, "location", location)
if windowtype is not None:
pulumi.set(__self__, "windowtype", windowtype)
@property
@pulumi.getter
def daysofweek(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
daysofweek defines the days of the week for this time window https://golang.org/pkg/time/#Weekday
"""
return pulumi.get(self, "daysofweek")
@daysofweek.setter
def daysofweek(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "daysofweek", value)
@property
@pulumi.getter
def hours(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['SubscriptionSpecTimewindowHoursArgs']]]]:
return pulumi.get(self, "hours")
@hours.setter
def hours(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['SubscriptionSpecTimewindowHoursArgs']]]]):
pulumi.set(self, "hours", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
https://en.wikipedia.org/wiki/List_of_tz_database_time_zones
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter
def windowtype(self) -> Optional[pulumi.Input[str]]:
"""
Whether the time window is active or blocked; if the time window is active, then deployments are applied only during these windows. Note: to generate the CRD with operator-sdk v0.10.0, the following line should be: <+kubebuilder:validation:Enum=active,blocked,Active,Blocked>
"""
return pulumi.get(self, "windowtype")
@windowtype.setter
def windowtype(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "windowtype", value)
@pulumi.input_type
class SubscriptionSpecTimewindowHoursArgs:
def __init__(__self__, *,
end: Optional[pulumi.Input[str]] = None,
start: Optional[pulumi.Input[str]] = None):
"""
HourRange time format: each time is given in Go's Kitchen format ("3:04PM"), defined at https://golang.org/pkg/time/#pkg-constants
"""
if end is not None:
pulumi.set(__self__, "end", end)
if start is not None:
pulumi.set(__self__, "start", start)
@property
@pulumi.getter
def end(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "end")
@end.setter
def end(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "end", value)
@property
@pulumi.getter
def start(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "start")
@start.setter
def start(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "start", value)
@pulumi.input_type
class SubscriptionStatusArgs:
def __init__(__self__, *,
ansiblejobs: Optional[pulumi.Input['SubscriptionStatusAnsiblejobsArgs']] = None,
last_update_time: Optional[pulumi.Input[str]] = None,
message: Optional[pulumi.Input[str]] = None,
phase: Optional[pulumi.Input[str]] = None,
reason: Optional[pulumi.Input[str]] = None,
statuses: Optional[pulumi.Input[Mapping[str, pulumi.Input['SubscriptionStatusStatusesArgs']]]] = None):
"""
SubscriptionStatus defines the observed state of Subscription. Example - status of a subscription on the hub:
  phase: Propagated
  statuses:
    washdc:
      packages:
        nginx:
          phase: Subscribed
        mongodb:
          phase: Failed
          reason: "not authorized"
          message: "user xxx does not have permission to start pod"
          resourceStatus: {}
    toronto:
      packages:
        nginx:
          phase: Subscribed
        mongodb:
          phase: Subscribed
The status of a subscription on a managed cluster will only have 1 cluster in the map.
:param pulumi.Input[str] phase: INSERT ADDITIONAL STATUS FIELD - define observed state of cluster Important: Run "make" to regenerate code after modifying this file
:param pulumi.Input[Mapping[str, pulumi.Input['SubscriptionStatusStatusesArgs']]] statuses: For an endpoint, this is the status of the subscription keyed by package name; for the hub, it aggregates all statuses keyed by cluster name
"""
if ansiblejobs is not None:
pulumi.set(__self__, "ansiblejobs", ansiblejobs)
if last_update_time is not None:
pulumi.set(__self__, "last_update_time", last_update_time)
if message is not None:
pulumi.set(__self__, "message", message)
if phase is not None:
pulumi.set(__self__, "phase", phase)
if reason is not None:
pulumi.set(__self__, "reason", reason)
if statuses is not None:
pulumi.set(__self__, "statuses", statuses)
@property
@pulumi.getter
def ansiblejobs(self) -> Optional[pulumi.Input['SubscriptionStatusAnsiblejobsArgs']]:
return pulumi.get(self, "ansiblejobs")
@ansiblejobs.setter
def ansiblejobs(self, value: Optional[pulumi.Input['SubscriptionStatusAnsiblejobsArgs']]):
pulumi.set(self, "ansiblejobs", value)
@property
@pulumi.getter(name="lastUpdateTime")
def last_update_time(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "last_update_time")
@last_update_time.setter
def last_update_time(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "last_update_time", value)
@property
@pulumi.getter
def message(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "message")
@message.setter
def message(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "message", value)
@property
@pulumi.getter
def phase(self) -> Optional[pulumi.Input[str]]:
"""
INSERT ADDITIONAL STATUS FIELD - define observed state of cluster Important: Run "make" to regenerate code after modifying this file
"""
return pulumi.get(self, "phase")
@phase.setter
def phase(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "phase", value)
@property
@pulumi.getter
def reason(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "reason")
@reason.setter
def reason(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "reason", value)
@property
@pulumi.getter
def statuses(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input['SubscriptionStatusStatusesArgs']]]]:
"""
For an endpoint, this is the status of the subscription keyed by package name; for the hub, it aggregates all statuses keyed by cluster name
"""
return pulumi.get(self, "statuses")
@statuses.setter
def statuses(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input['SubscriptionStatusStatusesArgs']]]]):
pulumi.set(self, "statuses", value)
@pulumi.input_type
class SubscriptionStatusAnsiblejobsArgs:
def __init__(__self__, *,
lastposthookjob: Optional[pulumi.Input[str]] = None,
lastprehookjob: Optional[pulumi.Input[str]] = None,
posthookjobshistory: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
prehookjobshistory: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
if lastposthookjob is not None:
pulumi.set(__self__, "lastposthookjob", lastposthookjob)
if lastprehookjob is not None:
pulumi.set(__self__, "lastprehookjob", lastprehookjob)
if posthookjobshistory is not None:
pulumi.set(__self__, "posthookjobshistory", posthookjobshistory)
if prehookjobshistory is not None:
pulumi.set(__self__, "prehookjobshistory", prehookjobshistory)
@property
@pulumi.getter
def lastposthookjob(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "lastposthookjob")
@lastposthookjob.setter
def lastposthookjob(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "lastposthookjob", value)
@property
@pulumi.getter
def lastprehookjob(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "lastprehookjob")
@lastprehookjob.setter
def lastprehookjob(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "lastprehookjob", value)
@property
@pulumi.getter
def posthookjobshistory(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
return pulumi.get(self, "posthookjobshistory")
@posthookjobshistory.setter
def posthookjobshistory(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "posthookjobshistory", value)
@property
@pulumi.getter
def prehookjobshistory(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
return pulumi.get(self, "prehookjobshistory")
@prehookjobshistory.setter
def prehookjobshistory(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "prehookjobshistory", value)
@pulumi.input_type
class SubscriptionStatusStatusesArgs:
def __init__(__self__, *,
packages: Optional[pulumi.Input[Mapping[str, pulumi.Input['SubscriptionStatusStatusesPackagesArgs']]]] = None):
"""
SubscriptionPerClusterStatus defines status for subscription in each cluster, key is package name
"""
if packages is not None:
pulumi.set(__self__, "packages", packages)
@property
@pulumi.getter
def packages(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input['SubscriptionStatusStatusesPackagesArgs']]]]:
return pulumi.get(self, "packages")
@packages.setter
def packages(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input['SubscriptionStatusStatusesPackagesArgs']]]]):
pulumi.set(self, "packages", value)
@pulumi.input_type
class SubscriptionStatusStatusesPackagesArgs:
def __init__(__self__, *,
last_update_time: pulumi.Input[str],
message: Optional[pulumi.Input[str]] = None,
phase: Optional[pulumi.Input[str]] = None,
reason: Optional[pulumi.Input[str]] = None,
resource_status: Optional[pulumi.Input[Mapping[str, Any]]] = None):
"""
SubscriptionUnitStatus defines status of a unit (subscription or package)
:param pulumi.Input[str] phase: Phase is Propagated if it is on the hub or Subscribed if it is on an endpoint
"""
pulumi.set(__self__, "last_update_time", last_update_time)
if message is not None:
pulumi.set(__self__, "message", message)
if phase is not None:
pulumi.set(__self__, "phase", phase)
if reason is not None:
pulumi.set(__self__, "reason", reason)
if resource_status is not None:
pulumi.set(__self__, "resource_status", resource_status)
@property
@pulumi.getter(name="lastUpdateTime")
def last_update_time(self) -> pulumi.Input[str]:
return pulumi.get(self, "last_update_time")
@last_update_time.setter
def last_update_time(self, value: pulumi.Input[str]):
pulumi.set(self, "last_update_time", value)
@property
@pulumi.getter
def message(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "message")
@message.setter
def message(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "message", value)
@property
@pulumi.getter
def phase(self) -> Optional[pulumi.Input[str]]:
"""
Phase is Propagated if it is on the hub or Subscribed if it is on an endpoint
"""
return pulumi.get(self, "phase")
@phase.setter
def phase(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "phase", value)
@property
@pulumi.getter
def reason(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "reason")
@reason.setter
def reason(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "reason", value)
@property
@pulumi.getter(name="resourceStatus")
def resource_status(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
return pulumi.get(self, "resource_status")
@resource_status.setter
def resource_status(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "resource_status", value)
[per-file quality-signal metric values for the preceding record omitted]

hexsha: 571d21b2e7a9acee92c1d9e1ded1642cfa7da088 | size: 129 | ext: py | lang: Python
max_stars_repo_path: proj_hello_world/__init__.py | max_stars_repo_name: xzheng902/proj_hello_world | max_stars_repo_head_hexsha: 797daa58864c55bf7ebd9c7f2e46a5a07da4c851 | max_stars_repo_licenses: ["MIT"] | null | null | null
max_issues_repo_path: proj_hello_world/__init__.py | max_issues_repo_name: xzheng902/proj_hello_world | max_issues_repo_head_hexsha: 797daa58864c55bf7ebd9c7f2e46a5a07da4c851 | max_issues_repo_licenses: ["MIT"] | null | null | null
max_forks_repo_path: proj_hello_world/__init__.py | max_forks_repo_name: xzheng902/proj_hello_world | max_forks_repo_head_hexsha: 797daa58864c55bf7ebd9c7f2e46a5a07da4c851 | max_forks_repo_licenses: ["MIT"] | null | null | null
# __init__.py
# Mike Zheng
# 6/18/19
# python project example
import proj_hello_world.hello_world, proj_hello_world.simple_math
[per-file quality-signal metric values for the preceding record omitted]

hexsha: 572f6e8fba55da545877bbd295facd7cfee164c1 | size: 17,879 | ext: py | lang: Python
max_stars_repo_path: test/test_block.py | max_stars_repo_name: ComplexArts/pyctrl-core | max_stars_repo_head_hexsha: a72bd53924410c2e7f1e71c8188a0391550febdd | max_stars_repo_licenses: ["Apache-2.0"] | null | null | null
max_issues_repo_path: test/test_block.py | max_issues_repo_name: ComplexArts/pyctrl-core | max_issues_repo_head_hexsha: a72bd53924410c2e7f1e71c8188a0391550febdd | max_issues_repo_licenses: ["Apache-2.0"] | null | null | null
max_forks_repo_path: test/test_block.py | max_forks_repo_name: ComplexArts/pyctrl-core | max_forks_repo_head_hexsha: a72bd53924410c2e7f1e71c8188a0391550febdd | max_forks_repo_licenses: ["Apache-2.0"] | null | null | null
import unittest
import numpy
import pyctrl.block as block
class TestUnittestAssertions(unittest.TestCase):
def test_BufferBlock(self):
# no mux, no demux
obj = block.ShortCircuit()
obj.write()
self.assertTrue(obj.read() == ())
obj.write(1)
self.assertTrue(obj.read() == (1,))
obj.write(1, 2)
self.assertTrue(obj.read() == (1, 2))
obj.write(1, numpy.array([2, 3]))
self.assertTrue(len(obj.read()) == 2)
self.assertTrue(obj.read()[0] == 1)
self.assertTrue(numpy.array_equal(obj.read()[1], numpy.array([2, 3])))
obj.write(1, numpy.array([2, 3]), numpy.array([4, 5]))
self.assertTrue(len(obj.read()) == 3)
self.assertTrue(obj.read()[0] == 1)
self.assertTrue(numpy.array_equal(obj.read()[1], numpy.array([2, 3])))
self.assertTrue(numpy.array_equal(obj.read()[2], numpy.array([4, 5])))
# mux, no demux
obj = block.ShortCircuit(mux=True)
obj.write()
self.assertTrue(obj.read() == ())
obj.write(1)
self.assertTrue(len(obj.read()) == 1)
self.assertTrue(numpy.array_equal(obj.read()[0], numpy.array([1])))
obj.write(1, 2)
self.assertTrue(len(obj.read()) == 1)
self.assertTrue(numpy.array_equal(obj.read()[0], numpy.array([1, 2])))
obj.write(1, [2, 3])
self.assertTrue(len(obj.read()) == 1)
self.assertTrue(numpy.array_equal(obj.read()[0], numpy.array([1, 2, 3])))
obj.write(1, numpy.array([2, 3]))
self.assertTrue(len(obj.read()) == 1)
self.assertTrue(numpy.array_equal(obj.read()[0], numpy.array([1, 2, 3])))
obj.write(1, numpy.array([2, 3]), numpy.array([4, 5]))
self.assertTrue(len(obj.read()) == 1)
self.assertTrue(numpy.array_equal(obj.read()[0], numpy.array([1, 2, 3, 4, 5])))
# mux, demux
obj = block.ShortCircuit(mux=True, demux=True)
obj.write()
self.assertTrue(obj.read() == ())
obj.write(1)
self.assertTrue(obj.read() == (1,))
obj.write(1, 2)
self.assertTrue(len(obj.read()) == 2)
self.assertTrue(obj.read() == (1, 2))
obj.write(1, numpy.array([2, 3]))
self.assertTrue(len(obj.read()) == 3)
self.assertTrue(obj.read() == (1, 2, 3))
obj.write(1, numpy.array([2, 3]), numpy.array([4, 5]))
self.assertTrue(len(obj.read()) == 5)
self.assertTrue(obj.read() == (1, 2, 3, 4, 5))
with self.assertRaises(block.BlockException):
obj = block.ShortCircuit(asd=1)
def test_Printer(self):
obj = block.Printer()
obj.write(1.5)
obj.write([1.5, 1.3])
obj.write((1.5, 1.3))
obj.write(*[1.5, 1.3])
obj.write(*(1.5, 1.3))
self.assertTrue(obj.get() == {'enabled': True, 'endln': '\n', 'frmt': '{: 12.4f}', 'sep': ' ', 'file': None,
'message': None})
self.assertTrue(obj.get('enabled') == True)
self.assertTrue(obj.get('endln', 'frmt') == {'endln': '\n', 'frmt': '{: 12.4f}'})
with self.assertRaises(block.BlockException):
obj = block.Printer(adsadsda=1)
with self.assertRaises(TypeError):
obj = block.Printer(1, "adsadsda")
with self.assertRaises(block.BlockException):
obj = block.Printer(par=1)
with self.assertRaises(block.BlockException):
obj = block.Printer(par="adsadsda")
with self.assertRaises(block.BlockException):
obj = block.Printer(par="adsadsda", sadasd=1)
obj = block.Printer(enabled=False)
def test_set(self):
blk = block.Printer()
self.assertTrue(blk.get() == {'enabled': True, 'endln': '\n', 'frmt': '{: 12.4f}', 'sep': ' ', 'file': None,
'message': None})
self.assertTrue(blk.get('enabled', 'frmt') == {'frmt': '{: 12.4f}', 'enabled': True})
self.assertTrue(blk.get('enabled') == True)
with self.assertRaises(KeyError):
blk.get('*enabled')
blk.set(enabled=False)
self.assertTrue(blk.get('enabled') == False)
blk.set(enabled=True)
self.assertTrue(blk.get('enabled') == True)
with self.assertRaises(block.BlockException):
blk.set(_enabled=True)
blk.set(sep='-')
self.assertTrue(blk.get('sep') == '-')
blk = block.BufferBlock()
self.assertTrue(blk.get() == {'enabled': True, 'demux': False, 'mux': False})
# test twice to make sure it is copying
self.assertTrue(blk.get() == {'enabled': True, 'demux': False, 'mux': False})
with self.assertRaises(KeyError):
blk.get('buffer')
blk.set(demux=True)
self.assertTrue(blk.get('demux') == True)
with self.assertRaises(block.BlockException):
blk.set(buffer=(1,))
with self.assertRaises(block.BlockException):
blk.set(controller=None)
with self.assertRaises(KeyError):
blk.get('buffer')
with self.assertRaises(KeyError):
blk.get('controller')
def test_logger(self):
import pyctrl.block as logger
import numpy as np
_logger = logger.Logger()
# write one entry
_logger.write(1, 2, 3)
log = _logger.get_log()
self.assertTrue(np.array_equal(log, np.array([[1, 2, 3]])))
# write second entry
_logger.write(4, 5, 6)
log = _logger.get_log()
self.assertTrue(np.array_equal(log, np.array([[1, 2, 3], [4, 5, 6]])))
# dynamic reshaping
_logger.write(1, 2, 3, 5)
log = _logger.get_log()
self.assertTrue(np.array_equal(log, np.array([[1, 2, 3, 5]])))
# vector entries
_logger.write(1, 2, [5, 3])
log = _logger.get_log()
self.assertTrue(np.array_equal(log, np.array([[1, 2, 3, 5], [1, 2, 5, 3]])))
# vector entries
_logger.write([1, 2], 2, [3, 5])
log = _logger.get_log()
self.assertTrue(np.array_equal(log, np.array([[1, 2, 2, 3, 5]])))
self.assertTrue(_logger.get() == {'auto_reset': False, 'enabled': True, 'current': 1, 'page': 0, 'labels': None,
'index': None})
# vector entries
_logger.write([1, 3], [2, 3, 5])
log = _logger.get_log()
self.assertTrue(np.array_equal(log, np.array([[1, 2, 2, 3, 5], [1, 3, 2, 3, 5]])))
# with labels
_logger = logger.Logger(labels=['s1', 's2', 's3'])
# write one entry
_logger.write(1, 2, 3)
self.assertTrue(_logger.get('index') == (0, 1, 2, 3))
log = _logger.get_log()
self.assertTrue(np.array_equal(log['s1'], np.array([[1]])))
self.assertTrue(np.array_equal(log['s2'], np.array([[2]])))
self.assertTrue(np.array_equal(log['s3'], np.array([[3]])))
self.assertTrue(_logger.get('index') == (0, 1, 2, 3))
# write second entry
_logger.write(4, 5, 6)
log = _logger.get_log()
self.assertTrue(np.array_equal(log['s1'], np.array([[1], [4]])))
self.assertTrue(np.array_equal(log['s2'], np.array([[2], [5]])))
self.assertTrue(np.array_equal(log['s3'], np.array([[3], [6]])))
self.assertTrue(_logger.get('index') == (0, 1, 2, 3))
_logger.set(labels=['s1', 's2', 's3', 's4'])
self.assertTrue(_logger.get('labels') == ['s1', 's2', 's3', 's4'])
# dynamic reshaping
_logger.write(1, 2, 3, 5)
log = _logger.get_log()
self.assertTrue(np.array_equal(log['s1'], np.array([[1]])))
self.assertTrue(np.array_equal(log['s2'], np.array([[2]])))
self.assertTrue(np.array_equal(log['s3'], np.array([[3]])))
self.assertTrue(np.array_equal(log['s4'], np.array([[5]])))
self.assertTrue(_logger.get('index') == (0, 1, 2, 3, 4))
# vector entries
_logger.write(1, 2, 4, [3, 5])
log = _logger.get_log()
self.assertTrue(np.array_equal(log['s1'], np.array([[1]])))
self.assertTrue(np.array_equal(log['s2'], np.array([[2]])))
self.assertTrue(np.array_equal(log['s3'], np.array([[4]])))
self.assertTrue(np.array_equal(log['s4'], np.array([[3, 5]])))
self.assertTrue(_logger.get('index') == (0, 1, 2, 3, 5))
# vector entries
_logger.set(labels=['s1', 's2', 's3', 's4'])
_logger.write([1, 2], 2, 4, [3, 5])
log = _logger.get_log()
self.assertTrue(np.array_equal(log['s1'], np.array([[1, 2]])))
self.assertTrue(np.array_equal(log['s2'], np.array([[2]])))
self.assertTrue(np.array_equal(log['s3'], np.array([[4]])))
self.assertTrue(np.array_equal(log['s4'], np.array([[3, 5]])))
self.assertTrue(_logger.get('index') == (0, 2, 3, 4, 6))
self.assertTrue(_logger.get() == {'auto_reset': False, 'enabled': True, 'current': 1, 'page': 0,
'labels': ['s1', 's2', 's3', 's4'], 'index': (0, 2, 3, 4, 6)})
def test_Signal(self):
import numpy as np
x = np.array([1, 2, 3])
obj = block.Signal(signal=x, repeat=True)
k = 0
(y,) = obj.read()
self.assertTrue(y == x[k])
k += 1
(y,) = obj.read()
self.assertTrue(y == x[k])
k += 1
(y,) = obj.read()
self.assertTrue(y == x[k])
k = 0
(y,) = obj.read()
self.assertTrue(y == x[k])
k += 1
(y,) = obj.read()
self.assertTrue(y == x[k])
k += 1
(y,) = obj.read()
self.assertTrue(y == x[k])
x = np.array([1, 2, 3])
obj = block.Signal(signal=x, repeat=False)
k = 0
(y,) = obj.read()
self.assertTrue(y == x[k])
k += 1
(y,) = obj.read()
self.assertTrue(y == x[k])
k += 1
(y,) = obj.read()
self.assertTrue(y == x[k])
(y,) = obj.read()
self.assertTrue(y == 0)
(y,) = obj.read()
self.assertTrue(y == 0)
(y,) = obj.read()
self.assertTrue(y == 0)
x = np.array([1, 2, 3])
obj = block.Signal(signal=x, repeat=True)
k = 0
(y,) = obj.read()
self.assertTrue(y == x[k])
k += 1
(y,) = obj.read()
self.assertTrue(y == x[k])
k += 1
obj.reset()
k = 0
(y,) = obj.read()
self.assertTrue(y == x[0])
k = 0
obj.set(index=k)
(y,) = obj.read()
self.assertTrue(y == x[k])
k = 2
obj.set(index=k)
(y,) = obj.read()
self.assertTrue(y == x[k])
k = 3
obj.set(index=k)
(y,) = obj.read()
self.assertTrue(y == x[0])
obj.set(repeat=False)
k = 3
with self.assertRaises(AssertionError):
obj.set(index=k)
def test_apply(self):
def f(*x):
return x[0] < 1
obj = block.Apply(function=f)
obj.write(0)
(y,) = obj.read()
self.assertTrue(y == True)
obj.write(1)
(y,) = obj.read()
self.assertTrue(y == False)
obj.write(0, 1)
(y,) = obj.read()
self.assertTrue(y == True)
obj.write(1, 2)
(y,) = obj.read()
self.assertTrue(y == False)
def g(*x):
return all(map(lambda y: y < 1, x))
obj = block.Apply(function=g)
obj.write(0)
(y,) = obj.read()
self.assertTrue(y == True)
obj.write(0, 0.5)
(y,) = obj.read()
self.assertTrue(y == True)
obj.write(1)
(y,) = obj.read()
self.assertTrue(y == False)
obj.write(0, 1)
(y,) = obj.read()
self.assertTrue(y == False)
def test_map(self):
def f(i):
return i + 1
obj = block.Map(function=f)
obj.write(0)
(y,) = obj.read()
self.assertTrue(y == 1)
obj.write(0, 2)
y = obj.read()
self.assertTrue(y == (1, 3))
obj.write()
y = obj.read()
self.assertTrue(y == ())
def test_html(self):
obj = block.ShortCircuit()
self.assertEqual(obj.html(),
'<dl><dt>mux</dt><dd>False</dd><dt>demux</dt><dd>False</dd><dt>enabled</dt><dd>True</dd></dl>')
def test_wrap():
import numpy as np
obj = block.Wrap()
for x in np.arange(0, 4 * np.pi, 0.01):
theta = np.mod(x, 2 * np.pi)
turns = np.floor_divide(x, 2 * np.pi)
# print('theta = {}, turns = {}'.format(theta, turns))
obj.write(theta)
(y,) = obj.read()
assert obj.theta == theta
assert obj.turns == turns
assert y == x
obj = block.Wrap()
for x in np.arange(0, 4 * np.pi, 0.01):
theta = np.mod(x + np.pi, 2 * np.pi) - np.pi
turns = np.floor_divide(x + np.pi, 2 * np.pi)
# print('x = {}, theta = {}, turns = {}'.format(x, theta, turns))
obj.write(theta)
(y,) = obj.read()
assert obj.theta == theta
assert obj.turns == turns
assert numpy.fabs(y - x) < 1e-4
obj = block.Wrap()
for x in np.arange(-np.pi, 3 * np.pi, 0.01):
theta = np.mod(x + np.pi, 2 * np.pi) - np.pi
turns = np.floor_divide(x + np.pi, 2 * np.pi)
obj.write(theta)
(y,) = obj.read()
assert obj.theta == theta
assert obj.turns == turns
assert numpy.fabs(y - x) < 1e-4
obj = block.Wrap()
for x in np.arange(4 * np.pi, 0, -0.01):
theta = np.mod(x, 2 * np.pi)
obj.write(theta)
(y,) = obj.read()
assert obj.theta == theta
assert numpy.fabs(y - x + 4 * np.pi) < 1e-4
obj.reset()
for x in np.arange(4 * np.pi, 0, -0.01):
theta = np.mod(x + np.pi, 2 * np.pi) - np.pi
obj.write(theta)
(y,) = obj.read()
assert obj.theta == theta
assert numpy.fabs(y - x + 4 * np.pi) < 1e-4
# degrees
obj = block.Wrap(scaling=360)
for x in np.arange(0, 4 * 360, 10):
theta = np.mod(x, 360)
turns = np.floor_divide(x, 360)
# print('theta = {}, turns = {}'.format(theta, turns))
obj.write(theta)
(y,) = obj.read()
assert obj.theta == theta
assert obj.turns == turns
assert y == x
# cycles
obj = block.Wrap(scaling=1)
for x in np.arange(0, 4, 10):
theta = np.mod(x, 1)
turns = np.floor_divide(x, 1)
# print('theta = {}, turns = {}'.format(theta, turns))
obj.write(theta)
(y,) = obj.read()
assert obj.theta == theta
assert obj.turns == turns
assert y == x
def test_Interp():
import numpy as np
t = np.array([0, 1, 2])
x = np.array([1, 0, 1])
obj = block.Interp(xp=x, fp=t)
for k in range(len(t)):
tk = t[k]
obj.write(tk)
(y,) = obj.read()
assert y == x[k]
obj.reset()
obj.write(0)
tk = 0.5
obj.write(tk)
(y,) = obj.read()
assert y == 0.5
tk = 1.5
obj.write(tk)
(y,) = obj.read()
assert y == 0.5
tk = 1.75
obj.write(tk)
(y,) = obj.read()
assert y == 0.75
def test_FadeIn():
import numpy as np
t = np.array([0, 1, 2, 3, 4])
yy = np.array([2, 2, 2, 2, 2])
x = np.array([1, 0.5, 0, 0, 0]) + (1 - np.array([1, 0.5, 0, 0, 0])) * yy
obj = block.Fade(target=1, period=2)
for k in range(len(t)):
tk = t[k]
obj.write(tk, yy[k])
(y,) = obj.read()
assert y == x[k]
obj.reset()
t = t + 2
for k in range(len(t)):
tk = t[k]
obj.write(tk, yy[k])
(y,) = obj.read()
assert y == x[k]
yy = np.array([2, 3, 4, 1, 0])
x = np.array([1, 0.5, 0, 0, 0]) + (1 - np.array([1, 0.5, 0, 0, 0])) * yy
obj = block.Fade(target=1, period=2)
for k in range(len(t)):
tk = t[k]
obj.write(tk, yy[k])
(y,) = obj.read()
assert y == x[k]
obj.reset()
t = t + 2
for k in range(len(t)):
tk = t[k]
obj.write(tk, yy[k])
(y,) = obj.read()
assert y == x[k]
obj.reset()
yy = np.array([4, 4, 4, 4, 4])
x = np.array([1, 0.5, 0, 0, 0]) + (1 - np.array([1, 0.5, 0, 0, 0])) * yy
t = t + 2
for k in range(len(t)):
tk = t[k]
obj.write(tk, yy[k])
(y,) = obj.read()
assert y == x[k]
yy = np.array([2, 2, 2, 2, 2])
x = np.array([0, 0, 0, 0, 0]) + (1 - np.array([1, 0.5, 0, 0, 0])) * yy
obj = block.Fade(period=2)
for k in range(len(t)):
tk = t[k]
obj.write(tk, yy[k])
(y,) = obj.read()
assert y == x[k]
yy = np.array([[2], [4]])
oo = np.array([[1], [0]])
x = np.dot(oo, np.array([[1, 0.5, 0, 0, 0]])) + np.dot(yy, (1 - np.array([[1, 0.5, 0, 0, 0]])))
obj = block.Fade(target=[1, 0], period=2)
for k in range(len(t)):
tk = t[k]
obj.write(tk, 2, 4)
y = obj.read()
assert not np.any(x[:, k] - numpy.array(y))
obj.reset()
t = t + 2
for k in range(len(t)):
tk = t[k]
obj.write(tk, 2, 4)
y = obj.read()
assert not np.any(x[:, k] - numpy.array(y))
yy = np.array([2, 3, 4, 1, 0])
x = (1 - np.array([1, 0.5, 0, 0, 0])) + np.array([1, 0.5, 0, 0, 0]) * yy
obj = block.Fade(target=1, direction='out', period=2)
for k in range(len(t)):
tk = t[k]
obj.write(tk, yy[k])
(y,) = obj.read()
assert y == x[k]
obj.reset()
t = t + 2
for k in range(len(t)):
tk = t[k]
obj.write(tk, yy[k])
(y,) = obj.read()
assert y == x[k]
if __name__ == '__main__':
unittest.main()
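# Note: unittest.main() only discovers test methods defined on unittest.TestCase
# subclasses, so the module-level test_wrap/test_Interp/test_FadeIn functions above
# are not executed by this entry point; a runner such as pytest collects both styles.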
[per-file quality-signal metric values for the preceding record omitted]

hexsha: 939c6914de42dfafe33d747cacea96e9461450ca | size: 31,373 | ext: py | lang: Python
max_stars_repo_path: sdk/python/pulumi_alicloud/ecs/reserved_instance.py | max_stars_repo_name: pulumi/pulumi-alicloud | max_stars_repo_head_hexsha: 9c34d84b4588a7c885c6bec1f03b5016e5a41683 | max_stars_repo_licenses: ["ECL-2.0", "Apache-2.0"] | max_stars_count: 42 | 2019-03-18T06:34:37.000Z | 2022-03-24T07:08:57.000Z
max_issues_repo_path: sdk/python/pulumi_alicloud/ecs/reserved_instance.py | max_issues_repo_name: pulumi/pulumi-alicloud | max_issues_repo_head_hexsha: 9c34d84b4588a7c885c6bec1f03b5016e5a41683 | max_issues_repo_licenses: ["ECL-2.0", "Apache-2.0"] | max_issues_count: 152 | 2019-04-15T21:03:44.000Z | 2022-03-29T18:00:57.000Z
max_forks_repo_path: sdk/python/pulumi_alicloud/ecs/reserved_instance.py | max_forks_repo_name: pulumi/pulumi-alicloud | max_forks_repo_head_hexsha: 9c34d84b4588a7c885c6bec1f03b5016e5a41683 | max_forks_repo_licenses: ["ECL-2.0", "Apache-2.0"] | max_forks_count: 3 | 2020-08-26T17:30:07.000Z | 2021-07-05T01:37:45.000Z
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['ReservedInstanceArgs', 'ReservedInstance']
@pulumi.input_type
class ReservedInstanceArgs:
def __init__(__self__, *,
instance_type: pulumi.Input[str],
description: Optional[pulumi.Input[str]] = None,
instance_amount: Optional[pulumi.Input[int]] = None,
name: Optional[pulumi.Input[str]] = None,
offering_type: Optional[pulumi.Input[str]] = None,
period: Optional[pulumi.Input[int]] = None,
period_unit: Optional[pulumi.Input[str]] = None,
platform: Optional[pulumi.Input[str]] = None,
resource_group_id: Optional[pulumi.Input[str]] = None,
scope: Optional[pulumi.Input[str]] = None,
zone_id: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a ReservedInstance resource.
:param pulumi.Input[str] instance_type: Instance type of the RI. For more information, see [Instance type families](https://www.alibabacloud.com/help/doc-detail/25378.html).
:param pulumi.Input[str] description: Description of the RI. 2 to 256 English or Chinese characters. It cannot start with http:// or https://.
:param pulumi.Input[int] instance_amount: Number of instances allocated to an RI (An RI is a coupon that includes one or more allocated instances.).
:param pulumi.Input[str] name: Name of the RI. The name must be a string of 2 to 128 characters in length and can contain letters, numbers, colons (:), underscores (_), and hyphens. It must start with a letter. It cannot start with http:// or https://.
:param pulumi.Input[str] offering_type: Payment type of the RI. Optional values: `No Upfront`: No upfront payment is required. `Partial Upfront`: A portion of upfront payment is required. `All Upfront`: Full upfront payment is required.
:param pulumi.Input[str] period_unit: Term unit. Optional value: Year.
:param pulumi.Input[str] platform: The operating system type of the image used by the instance. Optional values: `Windows`, `Linux`. Default is `Linux`.
:param pulumi.Input[str] resource_group_id: Resource group ID.
:param pulumi.Input[str] scope: Scope of the RI. Optional values: `Region`: region-level, `Zone`: zone-level. Default is `Region`.
:param pulumi.Input[str] zone_id: ID of the zone to which the RI belongs. When Scope is set to Zone, this parameter is required. For information about the zone list, see [DescribeZones](https://www.alibabacloud.com/help/doc-detail/25610.html).
"""
pulumi.set(__self__, "instance_type", instance_type)
if description is not None:
pulumi.set(__self__, "description", description)
if instance_amount is not None:
pulumi.set(__self__, "instance_amount", instance_amount)
if name is not None:
pulumi.set(__self__, "name", name)
if offering_type is not None:
pulumi.set(__self__, "offering_type", offering_type)
if period is not None:
pulumi.set(__self__, "period", period)
if period_unit is not None:
pulumi.set(__self__, "period_unit", period_unit)
if platform is not None:
pulumi.set(__self__, "platform", platform)
if resource_group_id is not None:
pulumi.set(__self__, "resource_group_id", resource_group_id)
if scope is not None:
pulumi.set(__self__, "scope", scope)
if zone_id is not None:
pulumi.set(__self__, "zone_id", zone_id)
@property
@pulumi.getter(name="instanceType")
def instance_type(self) -> pulumi.Input[str]:
"""
Instance type of the RI. For more information, see [Instance type families](https://www.alibabacloud.com/help/doc-detail/25378.html).
"""
return pulumi.get(self, "instance_type")
@instance_type.setter
def instance_type(self, value: pulumi.Input[str]):
pulumi.set(self, "instance_type", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
Description of the RI. 2 to 256 English or Chinese characters. It cannot start with http:// or https://.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter(name="instanceAmount")
def instance_amount(self) -> Optional[pulumi.Input[int]]:
"""
Number of instances allocated to an RI (An RI is a coupon that includes one or more allocated instances.).
"""
return pulumi.get(self, "instance_amount")
@instance_amount.setter
def instance_amount(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "instance_amount", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the RI. The name must be a string of 2 to 128 characters in length and can contain letters, numbers, colons (:), underscores (_), and hyphens. It must start with a letter. It cannot start with http:// or https://.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="offeringType")
def offering_type(self) -> Optional[pulumi.Input[str]]:
"""
Payment type of the RI. Optional values: `No Upfront`: No upfront payment is required. `Partial Upfront`: A portion of upfront payment is required. `All Upfront`: Full upfront payment is required.
"""
return pulumi.get(self, "offering_type")
@offering_type.setter
def offering_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "offering_type", value)
@property
@pulumi.getter
def period(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "period")
@period.setter
def period(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "period", value)
@property
@pulumi.getter(name="periodUnit")
def period_unit(self) -> Optional[pulumi.Input[str]]:
"""
Term unit. Optional value: Year.
"""
return pulumi.get(self, "period_unit")
@period_unit.setter
def period_unit(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "period_unit", value)
@property
@pulumi.getter
def platform(self) -> Optional[pulumi.Input[str]]:
"""
The operating system type of the image used by the instance. Optional values: `Windows`, `Linux`. Default is `Linux`.
"""
return pulumi.get(self, "platform")
@platform.setter
def platform(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "platform", value)
@property
@pulumi.getter(name="resourceGroupId")
def resource_group_id(self) -> Optional[pulumi.Input[str]]:
"""
Resource group ID.
"""
return pulumi.get(self, "resource_group_id")
@resource_group_id.setter
def resource_group_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_group_id", value)
@property
@pulumi.getter
def scope(self) -> Optional[pulumi.Input[str]]:
"""
Scope of the RI. Optional values: `Region`: region-level, `Zone`: zone-level. Default is `Region`.
"""
return pulumi.get(self, "scope")
@scope.setter
def scope(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "scope", value)
@property
@pulumi.getter(name="zoneId")
def zone_id(self) -> Optional[pulumi.Input[str]]:
"""
ID of the zone to which the RI belongs. When Scope is set to Zone, this parameter is required. For information about the zone list, see [DescribeZones](https://www.alibabacloud.com/help/doc-detail/25610.html).
"""
return pulumi.get(self, "zone_id")
@zone_id.setter
def zone_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "zone_id", value)
@pulumi.input_type
class _ReservedInstanceState:
def __init__(__self__, *,
description: Optional[pulumi.Input[str]] = None,
instance_amount: Optional[pulumi.Input[int]] = None,
instance_type: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
offering_type: Optional[pulumi.Input[str]] = None,
period: Optional[pulumi.Input[int]] = None,
period_unit: Optional[pulumi.Input[str]] = None,
platform: Optional[pulumi.Input[str]] = None,
resource_group_id: Optional[pulumi.Input[str]] = None,
scope: Optional[pulumi.Input[str]] = None,
zone_id: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering ReservedInstance resources.
:param pulumi.Input[str] description: Description of the RI. 2 to 256 English or Chinese characters. It cannot start with http:// or https://.
:param pulumi.Input[int] instance_amount: Number of instances allocated to an RI (An RI is a coupon that includes one or more allocated instances.).
:param pulumi.Input[str] instance_type: Instance type of the RI. For more information, see [Instance type families](https://www.alibabacloud.com/help/doc-detail/25378.html).
:param pulumi.Input[str] name: Name of the RI. The name must be a string of 2 to 128 characters in length and can contain letters, numbers, colons (:), underscores (_), and hyphens. It must start with a letter. It cannot start with http:// or https://.
:param pulumi.Input[str] offering_type: Payment type of the RI. Optional values: `No Upfront`: No upfront payment is required. `Partial Upfront`: A portion of upfront payment is required. `All Upfront`: Full upfront payment is required.
:param pulumi.Input[str] period_unit: Term unit. Optional value: Year.
:param pulumi.Input[str] platform: The operating system type of the image used by the instance. Optional values: `Windows`, `Linux`. Default is `Linux`.
:param pulumi.Input[str] resource_group_id: Resource group ID.
:param pulumi.Input[str] scope: Scope of the RI. Optional values: `Region`: region-level, `Zone`: zone-level. Default is `Region`.
:param pulumi.Input[str] zone_id: ID of the zone to which the RI belongs. When Scope is set to Zone, this parameter is required. For information about the zone list, see [DescribeZones](https://www.alibabacloud.com/help/doc-detail/25610.html).
"""
if description is not None:
pulumi.set(__self__, "description", description)
if instance_amount is not None:
pulumi.set(__self__, "instance_amount", instance_amount)
if instance_type is not None:
pulumi.set(__self__, "instance_type", instance_type)
if name is not None:
pulumi.set(__self__, "name", name)
if offering_type is not None:
pulumi.set(__self__, "offering_type", offering_type)
if period is not None:
pulumi.set(__self__, "period", period)
if period_unit is not None:
pulumi.set(__self__, "period_unit", period_unit)
if platform is not None:
pulumi.set(__self__, "platform", platform)
if resource_group_id is not None:
pulumi.set(__self__, "resource_group_id", resource_group_id)
if scope is not None:
pulumi.set(__self__, "scope", scope)
if zone_id is not None:
pulumi.set(__self__, "zone_id", zone_id)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
Description of the RI. 2 to 256 English or Chinese characters. It cannot start with http:// or https://.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter(name="instanceAmount")
def instance_amount(self) -> Optional[pulumi.Input[int]]:
"""
Number of instances allocated to an RI (An RI is a coupon that includes one or more allocated instances.).
"""
return pulumi.get(self, "instance_amount")
@instance_amount.setter
def instance_amount(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "instance_amount", value)
@property
@pulumi.getter(name="instanceType")
def instance_type(self) -> Optional[pulumi.Input[str]]:
"""
Instance type of the RI. For more information, see [Instance type families](https://www.alibabacloud.com/help/doc-detail/25378.html).
"""
return pulumi.get(self, "instance_type")
@instance_type.setter
def instance_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "instance_type", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the RI. The name must be a string of 2 to 128 characters in length and can contain letters, numbers, colons (:), underscores (_), and hyphens. It must start with a letter. It cannot start with http:// or https://.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="offeringType")
def offering_type(self) -> Optional[pulumi.Input[str]]:
"""
Payment type of the RI. Optional values: `No Upfront`: No upfront payment is required. `Partial Upfront`: A portion of upfront payment is required. `All Upfront`: Full upfront payment is required.
"""
return pulumi.get(self, "offering_type")
@offering_type.setter
def offering_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "offering_type", value)
@property
@pulumi.getter
def period(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "period")
@period.setter
def period(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "period", value)
@property
@pulumi.getter(name="periodUnit")
def period_unit(self) -> Optional[pulumi.Input[str]]:
"""
Term unit. Optional value: Year.
"""
return pulumi.get(self, "period_unit")
@period_unit.setter
def period_unit(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "period_unit", value)
@property
@pulumi.getter
def platform(self) -> Optional[pulumi.Input[str]]:
"""
The operating system type of the image used by the instance. Optional values: `Windows`, `Linux`. Default is `Linux`.
"""
return pulumi.get(self, "platform")
@platform.setter
def platform(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "platform", value)
@property
@pulumi.getter(name="resourceGroupId")
def resource_group_id(self) -> Optional[pulumi.Input[str]]:
"""
Resource group ID.
"""
return pulumi.get(self, "resource_group_id")
@resource_group_id.setter
def resource_group_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_group_id", value)
@property
@pulumi.getter
def scope(self) -> Optional[pulumi.Input[str]]:
"""
Scope of the RI. Optional values: `Region`: region-level, `Zone`: zone-level. Default is `Region`.
"""
return pulumi.get(self, "scope")
@scope.setter
def scope(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "scope", value)
@property
@pulumi.getter(name="zoneId")
def zone_id(self) -> Optional[pulumi.Input[str]]:
"""
ID of the zone to which the RI belongs. When Scope is set to Zone, this parameter is required. For information about the zone list, see [DescribeZones](https://www.alibabacloud.com/help/doc-detail/25610.html).
"""
return pulumi.get(self, "zone_id")
@zone_id.setter
def zone_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "zone_id", value)
class ReservedInstance(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
instance_amount: Optional[pulumi.Input[int]] = None,
instance_type: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
offering_type: Optional[pulumi.Input[str]] = None,
period: Optional[pulumi.Input[int]] = None,
period_unit: Optional[pulumi.Input[str]] = None,
platform: Optional[pulumi.Input[str]] = None,
resource_group_id: Optional[pulumi.Input[str]] = None,
scope: Optional[pulumi.Input[str]] = None,
zone_id: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Provides a Reserved Instance resource.
> **NOTE:** Available in 1.65.0+
## Example Usage
```python
import pulumi
import pulumi_alicloud as alicloud
default = alicloud.ecs.ReservedInstance("default",
instance_type="ecs.g6.large",
instance_amount=1,
period_unit="Year",
offering_type="All Upfront",
description="ReservedInstance",
zone_id="cn-hangzhou-h",
scope="Zone",
period=1)
```
## Import
ReservedInstance can be imported using its id, e.g.
```sh
$ pulumi import alicloud:ecs/reservedInstance:ReservedInstance default ecsri-uf6df4xm0h3licit****
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] description: Description of the RI. 2 to 256 English or Chinese characters. It cannot start with http:// or https://.
:param pulumi.Input[int] instance_amount: Number of instances allocated to an RI (An RI is a coupon that includes one or more allocated instances.).
:param pulumi.Input[str] instance_type: Instance type of the RI. For more information, see [Instance type families](https://www.alibabacloud.com/help/doc-detail/25378.html).
:param pulumi.Input[str] name: Name of the RI. The name must be a string of 2 to 128 characters in length and can contain letters, numbers, colons (:), underscores (_), and hyphens. It must start with a letter. It cannot start with http:// or https://.
:param pulumi.Input[str] offering_type: Payment type of the RI. Optional values: `No Upfront`: No upfront payment is required. `Partial Upfront`: A portion of upfront payment is required. `All Upfront`: Full upfront payment is required.
:param pulumi.Input[str] period_unit: Term unit. Optional value: Year.
:param pulumi.Input[str] platform: The operating system type of the image used by the instance. Optional values: `Windows`, `Linux`. Default is `Linux`.
:param pulumi.Input[str] resource_group_id: Resource group ID.
:param pulumi.Input[str] scope: Scope of the RI. Optional values: `Region`: region-level, `Zone`: zone-level. Default is `Region`.
:param pulumi.Input[str] zone_id: ID of the zone to which the RI belongs. When Scope is set to Zone, this parameter is required. For information about the zone list, see [DescribeZones](https://www.alibabacloud.com/help/doc-detail/25610.html).
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: ReservedInstanceArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Provides a Reserved Instance resource.
> **NOTE:** Available in 1.65.0+
## Example Usage
```python
import pulumi
import pulumi_alicloud as alicloud
default = alicloud.ecs.ReservedInstance("default",
instance_type="ecs.g6.large",
instance_amount=1,
period_unit="Year",
offering_type="All Upfront",
description="ReservedInstance",
zone_id="cn-hangzhou-h",
scope="Zone",
period=1)
```
## Import
ReservedInstance can be imported using its id, e.g.
```sh
$ pulumi import alicloud:ecs/reservedInstance:ReservedInstance default ecsri-uf6df4xm0h3licit****
```
:param str resource_name: The name of the resource.
:param ReservedInstanceArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(ReservedInstanceArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
instance_amount: Optional[pulumi.Input[int]] = None,
instance_type: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
offering_type: Optional[pulumi.Input[str]] = None,
period: Optional[pulumi.Input[int]] = None,
period_unit: Optional[pulumi.Input[str]] = None,
platform: Optional[pulumi.Input[str]] = None,
resource_group_id: Optional[pulumi.Input[str]] = None,
scope: Optional[pulumi.Input[str]] = None,
zone_id: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = ReservedInstanceArgs.__new__(ReservedInstanceArgs)
__props__.__dict__["description"] = description
__props__.__dict__["instance_amount"] = instance_amount
if instance_type is None and not opts.urn:
raise TypeError("Missing required property 'instance_type'")
__props__.__dict__["instance_type"] = instance_type
__props__.__dict__["name"] = name
__props__.__dict__["offering_type"] = offering_type
__props__.__dict__["period"] = period
__props__.__dict__["period_unit"] = period_unit
__props__.__dict__["platform"] = platform
__props__.__dict__["resource_group_id"] = resource_group_id
__props__.__dict__["scope"] = scope
__props__.__dict__["zone_id"] = zone_id
super(ReservedInstance, __self__).__init__(
'alicloud:ecs/reservedInstance:ReservedInstance',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
instance_amount: Optional[pulumi.Input[int]] = None,
instance_type: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
offering_type: Optional[pulumi.Input[str]] = None,
period: Optional[pulumi.Input[int]] = None,
period_unit: Optional[pulumi.Input[str]] = None,
platform: Optional[pulumi.Input[str]] = None,
resource_group_id: Optional[pulumi.Input[str]] = None,
scope: Optional[pulumi.Input[str]] = None,
zone_id: Optional[pulumi.Input[str]] = None) -> 'ReservedInstance':
"""
Get an existing ReservedInstance resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] description: Description of the RI. 2 to 256 English or Chinese characters. It cannot start with http:// or https://.
:param pulumi.Input[int] instance_amount: Number of instances allocated to an RI (An RI is a coupon that includes one or more allocated instances.).
:param pulumi.Input[str] instance_type: Instance type of the RI. For more information, see [Instance type families](https://www.alibabacloud.com/help/doc-detail/25378.html).
:param pulumi.Input[str] name: Name of the RI. The name must be a string of 2 to 128 characters in length and can contain letters, numbers, colons (:), underscores (_), and hyphens. It must start with a letter. It cannot start with http:// or https://.
:param pulumi.Input[str] offering_type: Payment type of the RI. Optional values: `No Upfront`: No upfront payment is required. `Partial Upfront`: A portion of upfront payment is required. `All Upfront`: Full upfront payment is required.
:param pulumi.Input[str] period_unit: Term unit. Optional value: Year.
:param pulumi.Input[str] platform: The operating system type of the image used by the instance. Optional values: `Windows`, `Linux`. Default is `Linux`.
:param pulumi.Input[str] resource_group_id: Resource group ID.
:param pulumi.Input[str] scope: Scope of the RI. Optional values: `Region`: region-level, `Zone`: zone-level. Default is `Region`.
:param pulumi.Input[str] zone_id: ID of the zone to which the RI belongs. When Scope is set to Zone, this parameter is required. For information about the zone list, see [DescribeZones](https://www.alibabacloud.com/help/doc-detail/25610.html).
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _ReservedInstanceState.__new__(_ReservedInstanceState)
__props__.__dict__["description"] = description
__props__.__dict__["instance_amount"] = instance_amount
__props__.__dict__["instance_type"] = instance_type
__props__.__dict__["name"] = name
__props__.__dict__["offering_type"] = offering_type
__props__.__dict__["period"] = period
__props__.__dict__["period_unit"] = period_unit
__props__.__dict__["platform"] = platform
__props__.__dict__["resource_group_id"] = resource_group_id
__props__.__dict__["scope"] = scope
__props__.__dict__["zone_id"] = zone_id
return ReservedInstance(resource_name, opts=opts, __props__=__props__)
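# Illustrative note (not part of the generated SDK): `get` adopts the state of an existing
# resource by its provider id, e.g.
#   existing = ReservedInstance.get("imported", id="ecsri-uf6df4xm0h3licit****")
# (the id value is copied from the Import example above and is a placeholder, not a real
# resource).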
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
Description of the RI. 2 to 256 English or Chinese characters. It cannot start with http:// or https://.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="instanceAmount")
def instance_amount(self) -> pulumi.Output[int]:
"""
Number of instances allocated to an RI (An RI is a coupon that includes one or more allocated instances.).
"""
return pulumi.get(self, "instance_amount")
@property
@pulumi.getter(name="instanceType")
def instance_type(self) -> pulumi.Output[str]:
"""
Instance type of the RI. For more information, see [Instance type families](https://www.alibabacloud.com/help/doc-detail/25378.html).
"""
return pulumi.get(self, "instance_type")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Name of the RI. The name must be a string of 2 to 128 characters in length and can contain letters, numbers, colons (:), underscores (_), and hyphens. It must start with a letter. It cannot start with http:// or https://.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="offeringType")
def offering_type(self) -> pulumi.Output[Optional[str]]:
"""
Payment type of the RI. Optional values: `No Upfront`: No upfront payment is required. `Partial Upfront`: A portion of upfront payment is required. `All Upfront`: Full upfront payment is required.
"""
return pulumi.get(self, "offering_type")
@property
@pulumi.getter
def period(self) -> pulumi.Output[Optional[int]]:
return pulumi.get(self, "period")
@property
@pulumi.getter(name="periodUnit")
def period_unit(self) -> pulumi.Output[Optional[str]]:
"""
Term unit. Optional value: Year.
"""
return pulumi.get(self, "period_unit")
@property
@pulumi.getter
def platform(self) -> pulumi.Output[str]:
"""
The operating system type of the image used by the instance. Optional values: `Windows`, `Linux`. Default is `Linux`.
"""
return pulumi.get(self, "platform")
@property
@pulumi.getter(name="resourceGroupId")
def resource_group_id(self) -> pulumi.Output[str]:
"""
Resource group ID.
"""
return pulumi.get(self, "resource_group_id")
@property
@pulumi.getter
def scope(self) -> pulumi.Output[Optional[str]]:
"""
Scope of the RI. Optional values: `Region`: region-level, `Zone`: zone-level. Default is `Region`.
"""
return pulumi.get(self, "scope")
@property
@pulumi.getter(name="zoneId")
def zone_id(self) -> pulumi.Output[Optional[str]]:
"""
ID of the zone to which the RI belongs. When Scope is set to Zone, this parameter is required. For information about the zone list, see [DescribeZones](https://www.alibabacloud.com/help/doc-detail/25610.html).
"""
return pulumi.get(self, "zone_id")
| 47.534848
| 260
| 0.646607
| 3,881
| 31,373
| 5.060551
| 0.059263
| 0.080092
| 0.084827
| 0.087373
| 0.906976
| 0.894399
| 0.883961
| 0.879582
| 0.87169
| 0.859318
| 0
| 0.006265
| 0.241896
| 31,373
| 659
| 261
| 47.60698
| 0.819501
| 0.394447
| 0
| 0.843501
| 1
| 0
| 0.085427
| 0.002632
| 0
| 0
| 0
| 0
| 0
| 1
| 0.164456
| false
| 0.002653
| 0.013263
| 0.007958
| 0.275862
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f539feea87c313c93d85279177ed7bdae6b2a9d3
| 25,202
|
py
|
Python
|
tests/python/test_normal_tx.py
|
sscqchain/go-xchain
|
3d4a3f6f10ce322ab8621d9c7ca51ee9e2bdfcd4
|
[
"Apache-2.0"
] | null | null | null |
tests/python/test_normal_tx.py
|
sscqchain/go-xchain
|
3d4a3f6f10ce322ab8621d9c7ca51ee9e2bdfcd4
|
[
"Apache-2.0"
] | null | null | null |
tests/python/test_normal_tx.py
|
sscqchain/go-xchain
|
3d4a3f6f10ce322ab8621d9c7ca51ee9e2bdfcd4
|
[
"Apache-2.0"
] | null | null | null |
import json
import subprocess
import time
import pytest
from pprint import pprint
from sscqsdk import SscqRPC, SscqTxBuilder, sscq_to_satoshi, Address, SscqPrivateKey
@pytest.fixture(scope="module", autouse=True)
def check_balance(conftest_args):
print("====> check_balance <=======")
sscqrpc = SscqRPC(chaid_id=conftest_args['CHAINID'], rpc_host=conftest_args['RPC_HOST'], rpc_port=conftest_args['RPC_PORT'])
from_addr = Address(conftest_args['ADDRESS'])
acc = sscqrpc.get_account_info(address=from_addr.address)
assert acc.balance_satoshi > sscq_to_satoshi(100000)
def test_get_params(conftest_args):
test_chain_id = conftest_args['CHAINID']
test_address = conftest_args['ADDRESS']
test_private_key = conftest_args['PRIVATE_KEY']
test_rpc_host = conftest_args['RPC_HOST']
test_rpc_port = conftest_args['RPC_PORT']
print(test_chain_id)
print(test_address)
print(test_private_key)
print(test_rpc_host)
print(test_rpc_port)
def test_normal_tx_send(conftest_args):
gas_wanted = 30000
gas_price = 100
tx_amount = 1
data = ''
memo = 'test_normal_transaction'
sscqrpc = SscqRPC(chaid_id=conftest_args['CHAINID'], rpc_host=conftest_args['RPC_HOST'], rpc_port=conftest_args['RPC_PORT'])
from_addr = Address(conftest_args['ADDRESS'])
new_to_addr = SscqPrivateKey('').address
private_key = SscqPrivateKey(conftest_args['PRIVATE_KEY'])
from_acc = sscqrpc.get_account_info(address=from_addr.address)
assert from_acc is not None
assert from_acc.balance_satoshi > gas_price * gas_wanted + tx_amount
signed_tx = SscqTxBuilder(
from_address=from_addr,
to_address=new_to_addr,
amount_satoshi=tx_amount,
sequence=from_acc.sequence,
account_number=from_acc.account_number,
chain_id=sscqrpc.chain_id,
gas_price=gas_price,
gas_wanted=gas_wanted,
data=data,
memo=memo
).build_and_sign(private_key=private_key)
tx_hash = sscqrpc.broadcast_tx(tx_hex=signed_tx)
print('tx_hash: {}'.format(tx_hash))
mempool = sscqrpc.get_mempool_trasactions()
pprint(mempool)
memtx = sscqrpc.get_mempool_transaction(transaction_hash=tx_hash)
pprint(memtx)
tx = sscqrpc.get_tranaction_until_timeout(transaction_hash=tx_hash)
pprint(tx)
tx = sscqrpc.get_transaction(transaction_hash=tx_hash)
assert tx['logs'][0]['success'] == True
assert tx['gas_wanted'] == str(gas_wanted)
assert tx['gas_used'] == str(gas_wanted)
tv = tx['tx']['value']
assert len(tv['msg']) == 1
assert tv['msg'][0]['type'] == 'sscqservice/send'
assert int(tv['fee']['gas_wanted']) == gas_wanted
assert int(tv['fee']['gas_price']) == gas_price
assert tv['memo'] == memo
mv = tv['msg'][0]['value']
assert mv['From'] == from_addr.address
assert mv['To'] == new_to_addr.address
assert mv['Data'] == data
assert int(mv['GasPrice']) == gas_price
assert int(mv['GasWanted']) == gas_wanted
assert 'satoshi' == mv['Amount'][0]['denom']
assert tx_amount == int(mv['Amount'][0]['amount'])
pprint(tx)
time.sleep(8) # wait for chain state update
to_acc = sscqrpc.get_account_info(address=new_to_addr.address)
assert to_acc is not None
assert to_acc.balance_satoshi == tx_amount
from_acc_new = sscqrpc.get_account_info(address=from_addr.address)
assert from_acc_new.address == from_acc.address
assert from_acc_new.sequence == from_acc.sequence + 1
assert from_acc_new.account_number == from_acc.account_number
assert from_acc_new.balance_satoshi == from_acc.balance_satoshi - (gas_price * gas_wanted + tx_amount)
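# Descriptive note (not in the original test): with gas_wanted=30000, gas_price=100 and
# tx_amount=1, the sender's balance is expected to drop by exactly
# 100 * 30000 + 1 = 3,000,001 satoshi, which is what the assertion above checks.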
def test_normal_tx_with_data(conftest_args):
# protocol_version = subprocess.getoutput('sscli query upgrade info --chain-id=testchain -o json | jq .current_version.UpgradeInfo.Protocol.version')
gas_wanted = 7500000
gas_price = 100
tx_amount = 1
data = 'ff' * 1000
memo = 'test_normal_transaction_with_data'
sscqrpc = SscqRPC(chaid_id=conftest_args['CHAINID'], rpc_host=conftest_args['RPC_HOST'], rpc_port=conftest_args['RPC_PORT'])
upgrade_info = sscqrpc.get_upgrade_info()
protocol_version = int(upgrade_info['current_version']['UpgradeInfo']['Protocol']['version'])
from_addr = Address(conftest_args['ADDRESS'])
new_to_addr = SscqPrivateKey('').address
# to_addr = Address('sscq1jrh6kxrcr0fd8gfgdwna8yyr9tkt99ggmz9ja2')
private_key = SscqPrivateKey(conftest_args['PRIVATE_KEY'])
from_acc = sscqrpc.get_account_info(address=from_addr.address)
assert from_acc is not None
assert from_acc.balance_satoshi > gas_price * gas_wanted + tx_amount
signed_tx = SscqTxBuilder(
from_address=from_addr,
to_address=new_to_addr,
amount_satoshi=tx_amount,
sequence=from_acc.sequence,
account_number=from_acc.account_number,
chain_id=sscqrpc.chain_id,
gas_price=gas_price,
gas_wanted=gas_wanted,
data=data,
memo=memo
).build_and_sign(private_key=private_key)
tx_hash = sscqrpc.broadcast_tx(tx_hex=signed_tx)
print('tx_hash: {}'.format(tx_hash))
mempool = sscqrpc.get_mempool_trasactions()
pprint(mempool)
memtx = sscqrpc.get_mempool_transaction(transaction_hash=tx_hash)
pprint(memtx)
tx = sscqrpc.get_tranaction_until_timeout(transaction_hash=tx_hash)
pprint(tx)
tx = sscqrpc.get_transaction(transaction_hash=tx_hash)
if protocol_version < 2: # v0 and v1
assert tx['logs'][0]['success'] == True
assert tx['gas_wanted'] == str(gas_wanted)
assert int(tx['gas_used']) < gas_wanted
tv = tx['tx']['value']
assert len(tv['msg']) == 1
assert tv['msg'][0]['type'] == 'sscqservice/send'
assert int(tv['fee']['gas_wanted']) == gas_wanted
assert int(tv['fee']['gas_price']) == gas_price
assert tv['memo'] == memo
mv = tv['msg'][0]['value']
assert mv['From'] == from_addr.address
assert mv['To'] == new_to_addr.address
assert mv['Data'] == data
assert int(mv['GasPrice']) == gas_price
assert int(mv['GasWanted']) == gas_wanted
assert 'satoshi' == mv['Amount'][0]['denom']
assert tx_amount == int(mv['Amount'][0]['amount'])
pprint(tx)
time.sleep(5) # wait for chain state update
to_acc = sscqrpc.get_account_info(address=new_to_addr.address)
assert to_acc is not None
assert to_acc.balance_satoshi == tx_amount
from_acc_new = sscqrpc.get_account_info(address=from_addr.address)
assert from_acc_new.address == from_acc.address
assert from_acc_new.sequence == from_acc.sequence + 1
assert from_acc_new.account_number == from_acc.account_number
assert from_acc_new.balance_satoshi == from_acc.balance_satoshi - (gas_price * int(tx['gas_used']) + tx_amount)
elif protocol_version == 2: # v2
# because `data` is not empty, `to` must be a valid contract address; since it is not,
# this transaction fails in the V2 handler
assert tx['logs'][0]['success'] == False
# Because `data` is not empty, v2's anteHandler does not adjust the tx's gasWanted.
assert tx['gas_wanted'] == str(gas_wanted)
# v2 does not allow `data` in a normal sscq transaction,
# so the EVM execution fails and all the gas is consumed
assert tx['gas_used'] == str(gas_wanted)
tv = tx['tx']['value']
assert len(tv['msg']) == 1
assert tv['msg'][0]['type'] == 'sscqservice/send'
assert int(tv['fee']['gas_wanted']) == gas_wanted
assert int(tv['fee']['gas_price']) == gas_price
assert tv['memo'] == memo
mv = tv['msg'][0]['value']
assert mv['From'] == from_addr.address
assert mv['To'] == new_to_addr.address
assert mv['Data'] == data
assert int(mv['GasPrice']) == gas_price
assert int(mv['GasWanted']) == gas_wanted
assert 'satoshi' == mv['Amount'][0]['denom']
assert tx_amount == int(mv['Amount'][0]['amount'])
pprint(tx)
time.sleep(5) # wait for chain state update
to_acc = sscqrpc.get_account_info(address=new_to_addr.address)
assert to_acc is None
from_acc_new = sscqrpc.get_account_info(address=from_addr.address)
assert from_acc_new.address == from_acc.address
assert from_acc_new.sequence == from_acc.sequence + 1
assert from_acc_new.account_number == from_acc.account_number
assert from_acc_new.balance_satoshi == from_acc.balance_satoshi - (gas_price * gas_wanted)
else:
raise Exception("invalid protocol version {}".format(protocol_version))
pass
def test_txsize_excess_100000bytes(conftest_args):
gas_wanted = 7500000
gas_price = 100
tx_amount = 1
# in protocol v0 and v1, TxSizeLimit is 1200000 bytes
# in protocol v2, TxSizeLimit is 100000 bytes
data = 'ff' * 50000
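# Descriptive note (not in the original test): 'ff' * 50000 is a 100,000-character hex
# string, so the serialized transaction exceeds the 100,000-byte v2 TxSizeLimit noted
# above while staying under the 1,200,000-byte limit of v0/v1.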
memo = 'test_normal_transaction_with_data_excess_100000bytes'
sscqrpc = SscqRPC(chaid_id=conftest_args['CHAINID'], rpc_host=conftest_args['RPC_HOST'], rpc_port=conftest_args['RPC_PORT'])
upgrade_info = sscqrpc.get_upgrade_info()
protocol_version = int(upgrade_info['current_version']['UpgradeInfo']['Protocol']['version'])
from_addr = Address(conftest_args['ADDRESS'])
new_to_addr = SscqPrivateKey('').address
# to_addr = Address('sscq1jrh6kxrcr0fd8gfgdwna8yyr9tkt99ggmz9ja2')
private_key = SscqPrivateKey(conftest_args['PRIVATE_KEY'])
from_acc = sscqrpc.get_account_info(address=from_addr.address)
assert from_acc is not None
assert from_acc.balance_satoshi > gas_price * gas_wanted + tx_amount
signed_tx = SscqTxBuilder(
from_address=from_addr,
to_address=new_to_addr,
amount_satoshi=tx_amount,
sequence=from_acc.sequence,
account_number=from_acc.account_number,
chain_id=sscqrpc.chain_id,
gas_price=gas_price,
gas_wanted=gas_wanted,
data=data,
memo=memo
).build_and_sign(private_key=private_key)
if protocol_version < 2: # v0 and v1
# TODO:
pass
elif protocol_version == 2: # v2
try:
tx_hash = sscqrpc.broadcast_tx(tx_hex=signed_tx)
print('tx_hash: {}'.format(tx_hash))
assert False, 'broadcast_tx should have rejected the oversized tx'
except Exception as e:
errmsg = '{}'.format(e)
print(e)
pass
else:
raise Exception("invalid protocol version {}".format(protocol_version))
pass
def test_normal_tx_gas_wanted_adjust(conftest_args):
# in protocol V2, if gasWanted is greater than 210000, anteHandler will adjust tx's gasWanted to 30000
# in protocol V2, max gasWanted is 7500000
gas_wanted = 210001
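# Descriptive note (not in the original test): 210001 is chosen one above the 210000
# threshold mentioned above, so under protocol v2 the anteHandler is expected to clamp
# gasWanted down to 30000, while v0/v1 leave it unchanged.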
# normal sscq send tx gas_used is 30000
normal_send_tx_gas_wanted = 30000
gas_price = 100
tx_amount = 1
data = ''
memo = 'test_normal_transaction_gas_wanted'
sscqrpc = SscqRPC(chaid_id=conftest_args['CHAINID'], rpc_host=conftest_args['RPC_HOST'], rpc_port=conftest_args['RPC_PORT'])
upgrade_info = sscqrpc.get_upgrade_info()
protocol_version = int(upgrade_info['current_version']['UpgradeInfo']['Protocol']['version'])
from_addr = Address(conftest_args['ADDRESS'])
new_to_addr = SscqPrivateKey('').address
private_key = SscqPrivateKey(conftest_args['PRIVATE_KEY'])
from_acc = sscqrpc.get_account_info(address=from_addr.address)
assert from_acc is not None
assert from_acc.balance_satoshi > gas_price * gas_wanted + tx_amount
signed_tx = SscqTxBuilder(
from_address=from_addr,
to_address=new_to_addr,
amount_satoshi=tx_amount,
sequence=from_acc.sequence,
account_number=from_acc.account_number,
chain_id=sscqrpc.chain_id,
gas_price=gas_price,
gas_wanted=gas_wanted,
data=data,
memo=memo
).build_and_sign(private_key=private_key)
tx_hash = sscqrpc.broadcast_tx(tx_hex=signed_tx)
print('tx_hash: {}'.format(tx_hash))
mempool = sscqrpc.get_mempool_trasactions()
pprint(mempool)
memtx = sscqrpc.get_mempool_transaction(transaction_hash=tx_hash)
pprint(memtx)
tx = sscqrpc.get_tranaction_until_timeout(transaction_hash=tx_hash)
pprint(tx)
tx = sscqrpc.get_transaction(transaction_hash=tx_hash)
if protocol_version < 2: # v0 and v1
assert tx['logs'][0]['success'] == True
assert tx['gas_wanted'] == str(gas_wanted)
assert int(tx['gas_used']) < gas_wanted
assert int(tx['gas_used']) == normal_send_tx_gas_wanted
tv = tx['tx']['value']
assert len(tv['msg']) == 1
assert tv['msg'][0]['type'] == 'sscqservice/send'
assert int(tv['fee']['gas_wanted']) == gas_wanted
assert int(tv['fee']['gas_price']) == gas_price
assert tv['memo'] == memo
mv = tv['msg'][0]['value']
assert mv['From'] == from_addr.address
assert mv['To'] == new_to_addr.address
assert mv['Data'] == data
assert int(mv['GasPrice']) == gas_price
assert int(mv['GasWanted']) == gas_wanted
assert 'satoshi' == mv['Amount'][0]['denom']
assert tx_amount == int(mv['Amount'][0]['amount'])
pprint(tx)
time.sleep(5) # wait for chain state update
to_acc = sscqrpc.get_account_info(address=new_to_addr.address)
assert to_acc is not None
assert to_acc.balance_satoshi == tx_amount
from_acc_new = sscqrpc.get_account_info(address=from_addr.address)
assert from_acc_new.address == from_acc.address
assert from_acc_new.sequence == from_acc.sequence + 1
assert from_acc_new.account_number == from_acc.account_number
assert from_acc_new.balance_satoshi == from_acc.balance_satoshi - (gas_price * int(tx['gas_used']) + tx_amount)
elif protocol_version == 2: # v2
# if gasWanted is greater than 210000, anteHandler will adjust tx's gasWanted to 30000
assert tx['logs'][0]['success'] == True
# Because `data` is empty, v2's anteHandler adjusts the tx's gasWanted down to 30000.
assert int(tx['gas_wanted']) == normal_send_tx_gas_wanted
assert int(tx['gas_used']) == normal_send_tx_gas_wanted
tv = tx['tx']['value']
assert len(tv['msg']) == 1
assert tv['msg'][0]['type'] == 'sscqservice/send'
assert int(tv['fee']['gas_wanted']) == gas_wanted
assert int(tv['fee']['gas_price']) == gas_price
assert tv['memo'] == memo
mv = tv['msg'][0]['value']
assert mv['From'] == from_addr.address
assert mv['To'] == new_to_addr.address
assert mv['Data'] == data
assert int(mv['GasPrice']) == gas_price
assert int(mv['GasWanted']) == gas_wanted
assert 'satoshi' == mv['Amount'][0]['denom']
assert tx_amount == int(mv['Amount'][0]['amount'])
pprint(tx)
time.sleep(5) # wait for chain state update
to_acc = sscqrpc.get_account_info(address=new_to_addr.address)
assert to_acc is not None
from_acc_new = sscqrpc.get_account_info(address=from_addr.address)
assert from_acc_new.address == from_acc.address
assert from_acc_new.sequence == from_acc.sequence + 1
assert from_acc_new.account_number == from_acc.account_number
assert from_acc_new.balance_satoshi == from_acc.balance_satoshi - (
gas_price * normal_send_tx_gas_wanted + tx_amount)
else:
raise Exception("invalid protocol version {}".format(protocol_version))
pass
def test_normal_tx_gas_wanted_excess_7500000(conftest_args):
gas_wanted = 7500001 # v2 max gas_wanted is 7500000
gas_price = 100
tx_amount = 1
data = ''
memo = 'test_normal_transaction_gas_wanted_excess_7500000'
sscqrpc = SscqRPC(chaid_id=conftest_args['CHAINID'], rpc_host=conftest_args['RPC_HOST'], rpc_port=conftest_args['RPC_PORT'])
upgrade_info = sscqrpc.get_upgrade_info()
protocol_version = int(upgrade_info['current_version']['UpgradeInfo']['Protocol']['version'])
from_addr = Address(conftest_args['ADDRESS'])
new_to_addr = SscqPrivateKey('').address
private_key = SscqPrivateKey(conftest_args['PRIVATE_KEY'])
from_acc = sscqrpc.get_account_info(address=from_addr.address)
assert from_acc is not None
assert from_acc.balance_satoshi > gas_price * gas_wanted + tx_amount
signed_tx = SscqTxBuilder(
from_address=from_addr,
to_address=new_to_addr,
amount_satoshi=tx_amount,
sequence=from_acc.sequence,
account_number=from_acc.account_number,
chain_id=sscqrpc.chain_id,
gas_price=gas_price,
gas_wanted=gas_wanted,
data=data,
memo=memo
).build_and_sign(private_key=private_key)
tx_hash = ''
try:
tx_hash = sscqrpc.broadcast_tx(tx_hex=signed_tx)
print('tx_hash: {}'.format(tx_hash))
except Exception as e:
assert protocol_version == 2
errmsg = '{}'.format(e)
print(e)
assert 'Tx could not excess TxGasLimit[7500000]' in errmsg
if protocol_version < 2:
tx = sscqrpc.get_tranaction_until_timeout(transaction_hash=tx_hash)
pprint(tx)
assert tx['logs'][0]['success'] == True
assert tx['gas_wanted'] == str(gas_wanted)
assert int(tx['gas_used']) < gas_wanted
tv = tx['tx']['value']
assert len(tv['msg']) == 1
assert tv['msg'][0]['type'] == 'sscqservice/send'
assert int(tv['fee']['gas_wanted']) == gas_wanted
assert int(tv['fee']['gas_price']) == gas_price
assert tv['memo'] == memo
mv = tv['msg'][0]['value']
assert mv['From'] == from_addr.address
assert mv['To'] == new_to_addr.address
assert mv['Data'] == data
assert int(mv['GasPrice']) == gas_price
assert int(mv['GasWanted']) == gas_wanted
assert 'satoshi' == mv['Amount'][0]['denom']
assert tx_amount == int(mv['Amount'][0]['amount'])
pprint(tx)
time.sleep(5) # wait for chain state update
to_acc = sscqrpc.get_account_info(address=new_to_addr.address)
assert to_acc is not None
assert to_acc.balance_satoshi == tx_amount
from_acc_new = sscqrpc.get_account_info(address=from_addr.address)
assert from_acc_new.address == from_acc.address
assert from_acc_new.sequence == from_acc.sequence + 1
assert from_acc_new.account_number == from_acc.account_number
assert from_acc_new.balance_satoshi == from_acc.balance_satoshi - (gas_price * int(tx['gas_used']) + tx_amount)
pass
def test_balance_less_than_fee_tx(conftest_args):
"""
test for issue #6
In protocol v0 and v1, if an account's balance is less than the fee (gas_wanted * gas_price),
its transactions can still be included in a block.
In protocol v2, if an account's balance is less than the fee (gas_wanted * gas_price),
its transaction is rejected when it is broadcast.
"""
gas_wanted = 30000
gas_price = 100
tx_amount = 1
data = ''
memo = 'test_balance_less_than_fee_tx'
sscqrpc = SscqRPC(chaid_id=conftest_args['CHAINID'], rpc_host=conftest_args['RPC_HOST'], rpc_port=conftest_args['RPC_PORT'])
upgrade_info = sscqrpc.get_upgrade_info()
protocol_version = int(upgrade_info['current_version']['UpgradeInfo']['Protocol']['version'])
from_addr = Address(conftest_args['ADDRESS'])
new_to_privkey = SscqPrivateKey('')
new_to_addr = new_to_privkey.address
private_key = SscqPrivateKey(conftest_args['PRIVATE_KEY'])
from_acc = sscqrpc.get_account_info(address=from_addr.address)
assert from_acc is not None
assert from_acc.balance_satoshi > gas_price * gas_wanted + tx_amount
signed_tx = SscqTxBuilder(
from_address=from_addr,
to_address=new_to_addr,
amount_satoshi=tx_amount,
sequence=from_acc.sequence,
account_number=from_acc.account_number,
chain_id=sscqrpc.chain_id,
gas_price=gas_price,
gas_wanted=gas_wanted,
data=data,
memo=memo
).build_and_sign(private_key=private_key)
tx_hash = sscqrpc.broadcast_tx(tx_hex=signed_tx)
print('tx_hash: {}'.format(tx_hash))
tx = sscqrpc.get_tranaction_until_timeout(transaction_hash=tx_hash)
assert tx['logs'][0]['success'] == True
time.sleep(5) # wait for chain state update
to_acc = sscqrpc.get_account_info(address=new_to_addr.address)
assert to_acc is not None
assert to_acc.balance_satoshi == tx_amount
signed_tx_back = SscqTxBuilder(
from_address=new_to_addr,
to_address=from_addr,
amount_satoshi=tx_amount,
sequence=to_acc.sequence,
account_number=to_acc.account_number,
chain_id=sscqrpc.chain_id,
gas_price=gas_price,
gas_wanted=gas_wanted,
data=data,
memo=memo
).build_and_sign(private_key=new_to_privkey)
if protocol_version < 2:
tx_hash_back = sscqrpc.broadcast_tx(tx_hex=signed_tx_back)
print('tx_hash_back: {}'.format(tx_hash_back))
tx = sscqrpc.get_tranaction_until_timeout(transaction_hash=tx_hash_back)
assert tx['logs'][0]['success'] == False
time.sleep(5) # wait for chain state update
to_acc_new = sscqrpc.get_account_info(address=new_to_addr.address)
assert to_acc_new is not None
assert to_acc_new.address == to_acc.address
assert to_acc_new.balance_satoshi == to_acc.balance_satoshi # balance unchanged
assert to_acc_new.sequence == to_acc.sequence + 1 # sequence changed
assert to_acc_new.account_number == to_acc.account_number
elif protocol_version == 2:
try:
tx_hash_back = sscqrpc.broadcast_tx(tx_hex=signed_tx_back)
print('tx_hash_back: {}'.format(tx_hash_back))
# reaching this point means the broadcast unexpectedly succeeded
assert False, 'broadcast_tx should have been rejected (balance less than fee)'
except Exception as e:
# ok
print(e)
to_acc_new = sscqrpc.get_account_info(address=new_to_addr.address)
assert to_acc_new is not None
assert to_acc_new.address == to_acc.address
assert to_acc_new.balance_satoshi == to_acc.balance_satoshi # balance unchanged
assert to_acc_new.sequence == to_acc.sequence # sequence unchanged (tx was rejected)
assert to_acc_new.account_number == to_acc.account_number
pass
else:
raise Exception("invalid protocol version:{}".format(protocol_version))
pass
def test_5000_normal_send_txs(conftest_args):
"""
A node's mempool holds 5000 txs by default; if the mempool is full, new txs are rejected.
Tendermint's blockGasLimit is 15,000,000, so with a gasWanted of 30000 per tx
a single block can include 500 txs.
"""
txs_count = 5000
gas_wanted = 30000
gas_price = 100
tx_amount = 1
data = ''
memo = 'test_5000_normal_send_txs'
sscqrpc = SscqRPC(chaid_id=conftest_args['CHAINID'], rpc_host=conftest_args['RPC_HOST'], rpc_port=conftest_args['RPC_PORT'])
from_addr = Address(conftest_args['ADDRESS'])
new_to_addr = SscqPrivateKey('').address
private_key = SscqPrivateKey(conftest_args['PRIVATE_KEY'])
from_acc = sscqrpc.get_account_info(address=from_addr.address)
assert from_acc is not None
assert from_acc.balance_satoshi > (gas_price * gas_wanted + tx_amount) * txs_count
signed_tx_list = []
for n in range(txs_count):
signed_tx = SscqTxBuilder(
from_address=from_addr,
to_address=new_to_addr,
amount_satoshi=tx_amount,
sequence=from_acc.sequence + n,
account_number=from_acc.account_number,
chain_id=sscqrpc.chain_id,
gas_price=gas_price,
gas_wanted=gas_wanted,
data=data,
memo=memo
).build_and_sign(private_key=private_key)
signed_tx_list.append(signed_tx)
tx_hash_list = []
for n in range(txs_count):
tx_hash = sscqrpc.broadcast_tx(tx_hex=signed_tx_list[n])
# print('tx_hash: {}'.format(tx_hash))
tx_hash_list.append(tx_hash)
tx = sscqrpc.get_tranaction_until_timeout(transaction_hash=tx_hash_list[-1], timeout_secs=(txs_count / 500.0 * 6.0))
assert tx['logs'][0]['success'] == True
time.sleep(5) # wait for chain state update
to_acc = sscqrpc.get_account_info(address=new_to_addr.address)
assert to_acc is not None
assert to_acc.balance_satoshi == tx_amount * txs_count
from_acc_new = sscqrpc.get_account_info(address=from_addr.address)
assert from_acc_new.address == from_acc.address
assert from_acc_new.sequence == from_acc.sequence + txs_count
assert from_acc_new.account_number == from_acc.account_number
assert from_acc_new.balance_satoshi == from_acc.balance_satoshi - (gas_price * gas_wanted + tx_amount) * txs_count
pass
def main():
pass
if __name__ == '__main__':
pytest.main(['test_normal_tx.py'])
pass
| 36.054363
| 155
| 0.673954
| 3,447
| 25,202
| 4.611256
| 0.064694
| 0.043158
| 0.03435
| 0.028185
| 0.870462
| 0.856559
| 0.836552
| 0.828688
| 0.827996
| 0.821139
| 0
| 0.01662
| 0.214507
| 25,202
| 698
| 156
| 36.106017
| 0.78632
| 0.07797
| 0
| 0.843444
| 0
| 0
| 0.091644
| 0.010586
| 0
| 0
| 0
| 0.001433
| 0.322896
| 1
| 0.019569
| false
| 0.021526
| 0.011742
| 0
| 0.031311
| 0.066536
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f546f90b74113b1585b96838b6237f677fccf609
| 133
|
py
|
Python
|
sphinxcontrib/test_reports/__init__.py
|
lassebn/sphinx-test-reports
|
1655c81a3b9e79cd46244810cd7f3ef8fb84c1aa
|
[
"MIT"
] | 12
|
2019-04-06T14:12:06.000Z
|
2021-11-04T21:10:03.000Z
|
sphinxcontrib/test_reports/__init__.py
|
lassebn/sphinx-test-reports
|
1655c81a3b9e79cd46244810cd7f3ef8fb84c1aa
|
[
"MIT"
] | 20
|
2018-11-16T09:56:27.000Z
|
2022-02-04T22:07:43.000Z
|
sphinxcontrib/test_reports/__init__.py
|
lassebn/sphinx-test-reports
|
1655c81a3b9e79cd46244810cd7f3ef8fb84c1aa
|
[
"MIT"
] | 7
|
2018-11-23T14:13:40.000Z
|
2021-12-16T14:44:43.000Z
|
# from sphinxcontrib.test_reports.needs.dyn_functions import Results4Needs
from sphinxcontrib.test_reports.test_reports import setup
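# Descriptive note (not part of the original file): re-exporting `setup` at the package
# level lets Sphinx find the extension entry point when a project lists
# extensions = ["sphinxcontrib.test_reports"] in its conf.py.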
| 44.333333
| 74
| 0.887218
| 17
| 133
| 6.705882
| 0.588235
| 0.289474
| 0.368421
| 0.491228
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008065
| 0.067669
| 133
| 2
| 75
| 66.5
| 0.91129
| 0.541353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
f5647c69481df6b09c86868ee7cf6708a98a56a8
| 6,961
|
py
|
Python
|
pid/assets.py
|
PlanetaryResources/pid
|
ecb146cc26c6ade2863bcdc6d271ead3cbcbbe40
|
[
"Apache-2.0"
] | 3
|
2019-06-14T18:05:22.000Z
|
2020-01-22T17:38:17.000Z
|
pid/assets.py
|
PlanetaryResources/pid
|
ecb146cc26c6ade2863bcdc6d271ead3cbcbbe40
|
[
"Apache-2.0"
] | null | null | null |
pid/assets.py
|
PlanetaryResources/pid
|
ecb146cc26c6ade2863bcdc6d271ead3cbcbbe40
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Application assets."""
from flask_assets import Bundle, Environment
css = Bundle(
'libs/bootstrap-3.3.0/css/bootstrap.min.css',
'libs/bootflat-2.0.4/css/bootflat.min.css',
'libs/photoswipe-4.1.2/css/photoswipe.css',
'libs/photoswipe-4.1.2/default-skin/default-skin.css',
'libs/bootstrap3-editable-5d8ecf3/css/bootstrap-editable.css',
'libs/bootstrap-datepicker-1.7.0/css/bootstrap-datepicker3.min.css',
'libs/jasny-bootstrap-3.1.3/css/jasny-bootstrap.min.css',
'libs/jBox-0.4.8/jBox.css',
'libs/jquery-typeahead-2.8.0/css/jquery.typeahead.min.css',
'libs/DataTables-1.10.15/css/datatables.css',
'libs/trumbowyg-2.7.0/ui/trumbowyg.min.css',
'libs/trumbowyg-2.7.0/plugins/colors/ui/trumbowyg.colors.min.css',
'css/fonts.css',
'css/code-blocks.css',
'css/pri-icons.css',
'css/header.css',
'css/footer.css',
'css/navbar.css',
'css/typeahead.css',
'css/style.css',
filters='cssmin',
output='public/css/common.css'
)
js = Bundle(
'libs/jquery-3.2.1/jquery.min.js',
'libs/jqueryrotate-2.3/jqueryrotate.min.js', # For rotating spacecraft only at this point
'libs/form-4.2.1/jquery.form.min.js', # For easy submitting HTML forms via AJAX
'libs/bootstrap-3.3.0/js/bootstrap.min.js',
'libs/bootstrap-datepicker-1.7.0/js/bootstrap-datepicker.min.js',
'libs/dropzone-4.3.0/js/dropzone.min.js', # For document/image drag and drops
'libs/photoswipe-4.1.2/js/photoswipe.min.js', # For image galleries
'libs/photoswipe-4.1.2/js/photoswipe-ui-default.min.js',
'libs/jquery.photoswipe-global.js', # Makes photoswipe easier. From https://github.com/yaquawa/jquery.photoswipe
'libs/bootstrap3-editable-5d8ecf3/js/bootstrap-editable.min.js', # For inline editing
'libs/bootstrap-notify-7b4711e/bootstrap-notify.min.js', # For success/error alerts with AJAX calls
'libs/jasny-bootstrap-3.1.3/js/jasny-bootstrap.min.js', # For entire table rows to act as clickable links
'libs/jBox-0.4.8/jBox.min.js', # For modals
'libs/jquery-typeahead-2.8.0/js/jquery.typeahead.min.js', # For typeahead searches
'libs/jquery-treetable-3.2.0/jquery.treetable.js',
'libs/garand-sticky-5cfc20c/jquery.sticky.js', # For sticky header row
'libs/DataTables-1.10.15/js/datatables.js',
'libs/trumbowyg-2.7.0/trumbowyg.min.js', # WYSIWYG editor
'libs/trumbowyg-2.7.0/plugins/colors/trumbowyg.colors.min.js',
'libs/trumbowyg-2.7.0/plugins/preformatted/trumbowyg.preformatted.min.js',
'libs/trumbowyg-2.7.0/plugins/base64/trumbowyg.base64.min.js',
'libs/velocity-1.5.0/velocity.min.js', # Effects library
'libs/konami-js-1.4.6/konami.js',
'js/globals.js', # Default settings for various JS libs
'js/jquery-plugins.js', # Various small jQuery helper scripts
'js/functions.js', # Our own JS functions
'js/modals.js', # Global modals
filters='jsmin',
output='public/js/common.js'
)
assets = Environment()
assets.register('js_all', js)
assets.register('css_all', css)
css_dev = Bundle(
'libs/bootstrap-3.3.0/css/bootstrap.css',
'libs/bootflat-2.0.4/css/bootflat.css',
'libs/photoswipe-4.1.2/css/photoswipe.css',
'libs/photoswipe-4.1.2/default-skin/default-skin.css',
'libs/bootstrap3-editable-5d8ecf3/css/bootstrap-editable.css',
'libs/bootstrap-datepicker-1.7.0/css/bootstrap-datepicker3.css',
'libs/jasny-bootstrap-3.1.3/css/jasny-bootstrap.css',
'libs/jBox-0.4.8/jBox.css',
'libs/jquery-typeahead-2.8.0/css/jquery.typeahead.css',
'libs/DataTables-1.10.15/css/datatables.css',
'libs/trumbowyg-2.7.0/ui/trumbowyg.css',
'libs/trumbowyg-2.7.0/plugins/colors/ui/trumbowyg.colors.css',
'css/fonts.css',
'css/code-blocks.css',
'css/pri-icons.css',
'css/header.css',
'css/footer.css',
'css/navbar.css',
'css/typeahead.css',
'css/style.css',
filters='cssmin',
output='public/css/common.css'
)
js_dev = Bundle(
'libs/jquery-3.2.1/jquery.js',
'libs/jqueryrotate-2.3/jqueryrotate.js', # For rotating spacecraft only at this point
'libs/form-4.2.1/jquery.form.js', # For easy submitting HTML forms via AJAX
'libs/bootstrap-3.3.0/js/bootstrap.js',
'libs/bootstrap-datepicker-1.7.0/js/bootstrap-datepicker.js',
'libs/dropzone-4.3.0/js/dropzone.js', # For document/image drag and drops
'libs/photoswipe-4.1.2/js/photoswipe.js', # For image galleries
'libs/photoswipe-4.1.2/js/photoswipe-ui-default.js',
'libs/jquery.photoswipe-global.js', # Makes photoswipe easier. From https://github.com/yaquawa/jquery.photoswipe
'libs/bootstrap3-editable-5d8ecf3/js/bootstrap-editable.js', # For inline editing
'libs/bootstrap-notify-7b4711e/bootstrap-notify.js', # For success/error alerts with AJAX calls
'libs/jasny-bootstrap-3.1.3/js/jasny-bootstrap.js', # For entire table rows to act as clickable links
'libs/jBox-0.4.8/jBox.js', # For modals
'libs/jquery-typeahead-2.8.0/js/jquery.typeahead.js', # For typeahead searches
'libs/jquery-treetable-3.2.0/jquery.treetable.js',
'libs/garand-sticky-5cfc20c/jquery.sticky.js', # For sticky header row
'libs/DataTables-1.10.15/js/datatables.js', # For sortable tables etc on dashboard
'libs/trumbowyg-2.7.0/trumbowyg.js', # WYSIWYG editor
'libs/trumbowyg-2.7.0/plugins/colors/trumbowyg.colors.js',
'libs/trumbowyg-2.7.0/plugins/preformatted/trumbowyg.preformatted.js',
'libs/trumbowyg-2.7.0/plugins/base64/trumbowyg.base64.js',
'libs/trumbowyg-2.7.0/plugins/cleanpaste/trumbowyg.cleanpaste.js',
'libs/velocity-1.5.0/velocity.js', # Effects library
'libs/konami-js-1.4.6/konami.js',
'js/globals.js', # Default settings for various JS libs
'js/jquery-plugins.js', # Various small jQuery helper scripts
'js/functions.js', # Our own JS functions
'js/modals.js', # Global modals
filters='jsmin',
output='public/js/common.js'
)
assets.register('js_dev', js_dev)
assets.register('css_dev', css_dev)
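Editor's note: a minimal wiring sketch for the bundles above, assuming Flask-Assets and a Flask app object (the names here are illustrative, not taken from the project; the module above creates Environment() without an app and registers its bundles on it). The registered bundle name is what templates reference, and the minified output file listed in the bundle is what gets served.

from flask import Flask
from flask_assets import Bundle, Environment

app = Flask(__name__)
assets = Environment(app)  # or create Environment() first and call assets.init_app(app) later

# A toy bundle standing in for the much larger 'css_all'/'js_all' bundles defined above.
css_demo = Bundle('css/style.css', filters='cssmin', output='public/css/demo.css')
assets.register('css_demo', css_demo)

# Jinja2 template usage for a registered bundle:
#   {% assets "css_demo" %}<link rel="stylesheet" href="{{ ASSET_URL }}">{% endassets %}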
| 54.811024
| 148
| 0.605229
| 944
| 6,961
| 4.45339
| 0.145127
| 0.036632
| 0.043292
| 0.046384
| 0.92745
| 0.92745
| 0.900333
| 0.870599
| 0.826356
| 0.826356
| 0
| 0.044148
| 0.24508
| 6,961
| 126
| 149
| 55.246032
| 0.755852
| 0.159316
| 0
| 0.444444
| 0
| 0.102564
| 0.63483
| 0.557735
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.008547
| 0
| 0.008547
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| f57e6b3fdcdc9d69dd28bab6eca5336ad63fdc79
| 1,640
| py
| Python
| tests/unit_tests/test_015_resources_hostgroups.py
| Whyrl35/sshportal-api
| 1aa437e746f01ba1df48de904466f91a103a11ad
| [ "MIT" ] | 1
| 2021-07-06T21:11:06.000Z
| 2021-07-06T21:11:06.000Z
| tests/unit_tests/test_015_resources_hostgroups.py
| Whyrl35/sshportal-api
| 1aa437e746f01ba1df48de904466f91a103a11ad
| [ "MIT" ] | null | null | null
| tests/unit_tests/test_015_resources_hostgroups.py
| Whyrl35/sshportal-api
| 1aa437e746f01ba1df48de904466f91a103a11ad
| [ "MIT" ] | null | null | null |
from flask import url_for
def test_hostgroups(client, access_token):
token = access_token
res = client.get(url_for('hostgroups'), headers={'authorization': "Bearer {token}".format(token=token)})
assert res.status_code == 200
assert res.json[0]['id'] == 1
assert res.json[0]['name'] == "default"
assert res.json[0]['comment'] == "created by sshportal"
assert 'acls' in res.json[0]
assert 'hosts' in res.json[0]
assert res.json[0]['acls'][0]['id'] == 1
assert res.json[0]['acls'][0]['comment'] == "created by sshportal"
def test_hostgroup_id(client, access_token):
token = access_token
res = client.get(url_for('hostgroupid', id=1), headers={'authorization': "Bearer {token}".format(token=token)})
assert res.status_code == 200
assert res.json['id'] == 1
assert res.json['name'] == "default"
assert res.json['comment'] == "created by sshportal"
assert 'acls' in res.json
assert 'hosts' in res.json
assert res.json['acls'][0]['id'] == 1
assert res.json['acls'][0]['comment'] == "created by sshportal"
def test_hostgroup_name(client, access_token):
token = access_token
res = client.get(url_for(
'hostgroupname', name="default"),
headers={'authorization': "Bearer {token}".format(token=token)}
)
assert res.status_code == 200
assert res.json['id'] == 1
assert res.json['name'] == "default"
assert res.json['comment'] == "created by sshportal"
assert 'acls' in res.json
assert 'hosts' in res.json
assert res.json['acls'][0]['id'] == 1
assert res.json['acls'][0]['comment'] == "created by sshportal"
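Editor's note: the `client` and `access_token` fixtures used above come from the project's test configuration, which is not shown here. For illustration only, the response body these assertions imply has roughly the following shape; only the asserted fields are known.

# Illustrative only -- reconstructed from the assertions above, not taken from the API.
expected_hostgroup = {
    "id": 1,
    "name": "default",
    "comment": "created by sshportal",
    "acls": [
        {"id": 1, "comment": "created by sshportal"},  # only id/comment of the first ACL are asserted
    ],
    "hosts": [],  # presence of the key is asserted; its contents are not checked in these tests
}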
| 37.272727
| 115
| 0.641463
| 228
| 1,640
| 4.535088
| 0.162281
| 0.142166
| 0.188588
| 0.069633
| 0.911025
| 0.865571
| 0.852031
| 0.813346
| 0.813346
| 0.710832
| 0
| 0.02161
| 0.181707
| 1,640
| 43
| 116
| 38.139535
| 0.748882
| 0
| 0
| 0.540541
| 0
| 0
| 0.231707
| 0
| 0
| 0
| 0
| 0
| 0.648649
| 1
| 0.081081
| false
| 0
| 0.027027
| 0
| 0.108108
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| f5846762b4b1e8b2182306476f6637f1c493461f
| 609
| py
| Python
| src/graph_transpiler/webdnn/frontend/keras/layers/__init__.py
| steerapi/webdnn
| 1df51cc094e5a528cfd3452c264905708eadb491
| [ "MIT" ] | 1
| 2021-04-09T15:55:35.000Z
| 2021-04-09T15:55:35.000Z
| src/graph_transpiler/webdnn/frontend/keras/layers/__init__.py
| steerapi/webdnn
| 1df51cc094e5a528cfd3452c264905708eadb491
| [ "MIT" ] | null | null | null
| src/graph_transpiler/webdnn/frontend/keras/layers/__init__.py
| steerapi/webdnn
| 1df51cc094e5a528cfd3452c264905708eadb491
| [ "MIT" ] | null | null | null |
from webdnn.frontend.keras.layers import advanced_activations
from webdnn.frontend.keras.layers import convolutional
from webdnn.frontend.keras.layers import core
from webdnn.frontend.keras.layers import embeddings
from webdnn.frontend.keras.layers import local
from webdnn.frontend.keras.layers import merge
from webdnn.frontend.keras.layers import models
from webdnn.frontend.keras.layers import noise
from webdnn.frontend.keras.layers import normalization
from webdnn.frontend.keras.layers import pooling
from webdnn.frontend.keras.layers import recurrent
from webdnn.frontend.keras.layers import wrappers
| 46.846154
| 61
| 0.862069
| 85
| 609
| 6.164706
| 0.223529
| 0.229008
| 0.412214
| 0.526718
| 0.801527
| 0.801527
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078818
| 609
| 12
| 62
| 50.75
| 0.934046
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 8
| f5a396047a9739cd1e9e45d41cabac50c9de95e3
| 48,134
| py
| Python
| great_international/migrations/0011_auto_20190404_1321.py
| uktrade/directory-cms
| 8c8d13ce29ea74ddce7a40f3dd29c8847145d549
| [ "MIT" ] | 6
| 2018-03-20T11:19:07.000Z
| 2021-10-05T07:53:11.000Z
| great_international/migrations/0011_auto_20190404_1321.py
| uktrade/directory-cms
| 8c8d13ce29ea74ddce7a40f3dd29c8847145d549
| [ "MIT" ] | 802
| 2018-02-05T14:16:13.000Z
| 2022-02-10T10:59:21.000Z
| great_international/migrations/0011_auto_20190404_1321.py
| uktrade/directory-cms
| 8c8d13ce29ea74ddce7a40f3dd29c8847145d549
| [ "MIT" ] | 6
| 2019-01-22T13:19:37.000Z
| 2019-07-01T10:35:26.000Z |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.18 on 2019-04-04 13:21
from __future__ import unicode_literals
import core.model_fields
import core.validators
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('wagtailimages', '0021_image_file_hash'),
('wagtailcore', '0040_page_draft_title'),
('great_international', '0010_auto_20190404_1320'),
]
operations = [
migrations.AddField(
model_name='internationalhomepage',
name='hero_cta_text_ar',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_cta_text_de',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_cta_text_en_gb',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_cta_text_es',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_cta_text_fr',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_cta_text_ja',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_cta_text_pt',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_cta_text_pt_br',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_cta_text_ru',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_cta_text_zh_hans',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_image_ar',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_image_de',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_image_en_gb',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_image_es',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_image_fr',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_image_ja',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_image_pt',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_image_pt_br',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_image_ru',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_image_zh_hans',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_subtitle_ar',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_subtitle_de',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_subtitle_en_gb',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_subtitle_es',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_subtitle_fr',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_subtitle_ja',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_subtitle_pt',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_subtitle_pt_br',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_subtitle_ru',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_subtitle_zh_hans',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_title_ar',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_title_de',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_title_en_gb',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_title_es',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_title_fr',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_title_ja',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_title_pt',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_title_pt_br',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_title_ru',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='hero_title_zh_hans',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='invest_content_ar',
field=core.model_fields.MarkdownField(blank=True, null=True, validators=[core.validators.slug_hyperlinks]),
),
migrations.AddField(
model_name='internationalhomepage',
name='invest_content_de',
field=core.model_fields.MarkdownField(blank=True, null=True, validators=[core.validators.slug_hyperlinks]),
),
migrations.AddField(
model_name='internationalhomepage',
name='invest_content_en_gb',
field=core.model_fields.MarkdownField(blank=True, null=True, validators=[core.validators.slug_hyperlinks]),
),
migrations.AddField(
model_name='internationalhomepage',
name='invest_content_es',
field=core.model_fields.MarkdownField(blank=True, null=True, validators=[core.validators.slug_hyperlinks]),
),
migrations.AddField(
model_name='internationalhomepage',
name='invest_content_fr',
field=core.model_fields.MarkdownField(blank=True, null=True, validators=[core.validators.slug_hyperlinks]),
),
migrations.AddField(
model_name='internationalhomepage',
name='invest_content_ja',
field=core.model_fields.MarkdownField(blank=True, null=True, validators=[core.validators.slug_hyperlinks]),
),
migrations.AddField(
model_name='internationalhomepage',
name='invest_content_pt',
field=core.model_fields.MarkdownField(blank=True, null=True, validators=[core.validators.slug_hyperlinks]),
),
migrations.AddField(
model_name='internationalhomepage',
name='invest_content_pt_br',
field=core.model_fields.MarkdownField(blank=True, null=True, validators=[core.validators.slug_hyperlinks]),
),
migrations.AddField(
model_name='internationalhomepage',
name='invest_content_ru',
field=core.model_fields.MarkdownField(blank=True, null=True, validators=[core.validators.slug_hyperlinks]),
),
migrations.AddField(
model_name='internationalhomepage',
name='invest_content_zh_hans',
field=core.model_fields.MarkdownField(blank=True, null=True, validators=[core.validators.slug_hyperlinks]),
),
migrations.AddField(
model_name='internationalhomepage',
name='invest_image_ar',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='invest_image_de',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='invest_image_en_gb',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='invest_image_es',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='invest_image_fr',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='invest_image_ja',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='invest_image_pt',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='invest_image_pt_br',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='invest_image_ru',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='invest_image_zh_hans',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='invest_title_ar',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='invest_title_de',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='invest_title_en_gb',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='invest_title_es',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='invest_title_fr',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='invest_title_ja',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='invest_title_pt',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='invest_title_pt_br',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='invest_title_ru',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='invest_title_zh_hans',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='news_title_ar',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='news_title_de',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='news_title_en_gb',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='news_title_es',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='news_title_fr',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='news_title_ja',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='news_title_pt',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='news_title_pt_br',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='news_title_ru',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='news_title_zh_hans',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='related_page_one_ar',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.Page'),
),
migrations.AddField(
model_name='internationalhomepage',
name='related_page_one_de',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.Page'),
),
migrations.AddField(
model_name='internationalhomepage',
name='related_page_one_en_gb',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.Page'),
),
migrations.AddField(
model_name='internationalhomepage',
name='related_page_one_es',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.Page'),
),
migrations.AddField(
model_name='internationalhomepage',
name='related_page_one_fr',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.Page'),
),
migrations.AddField(
model_name='internationalhomepage',
name='related_page_one_ja',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.Page'),
),
migrations.AddField(
model_name='internationalhomepage',
name='related_page_one_pt',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.Page'),
),
migrations.AddField(
model_name='internationalhomepage',
name='related_page_one_pt_br',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.Page'),
),
migrations.AddField(
model_name='internationalhomepage',
name='related_page_one_ru',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.Page'),
),
migrations.AddField(
model_name='internationalhomepage',
name='related_page_one_zh_hans',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.Page'),
),
migrations.AddField(
model_name='internationalhomepage',
name='related_page_three_ar',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.Page'),
),
migrations.AddField(
model_name='internationalhomepage',
name='related_page_three_de',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.Page'),
),
migrations.AddField(
model_name='internationalhomepage',
name='related_page_three_en_gb',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.Page'),
),
migrations.AddField(
model_name='internationalhomepage',
name='related_page_three_es',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.Page'),
),
migrations.AddField(
model_name='internationalhomepage',
name='related_page_three_fr',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.Page'),
),
migrations.AddField(
model_name='internationalhomepage',
name='related_page_three_ja',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.Page'),
),
migrations.AddField(
model_name='internationalhomepage',
name='related_page_three_pt',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.Page'),
),
migrations.AddField(
model_name='internationalhomepage',
name='related_page_three_pt_br',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.Page'),
),
migrations.AddField(
model_name='internationalhomepage',
name='related_page_three_ru',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.Page'),
),
migrations.AddField(
model_name='internationalhomepage',
name='related_page_three_zh_hans',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.Page'),
),
migrations.AddField(
model_name='internationalhomepage',
name='related_page_two_ar',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.Page'),
),
migrations.AddField(
model_name='internationalhomepage',
name='related_page_two_de',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.Page'),
),
migrations.AddField(
model_name='internationalhomepage',
name='related_page_two_en_gb',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.Page'),
),
migrations.AddField(
model_name='internationalhomepage',
name='related_page_two_es',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.Page'),
),
migrations.AddField(
model_name='internationalhomepage',
name='related_page_two_fr',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.Page'),
),
migrations.AddField(
model_name='internationalhomepage',
name='related_page_two_ja',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.Page'),
),
migrations.AddField(
model_name='internationalhomepage',
name='related_page_two_pt',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.Page'),
),
migrations.AddField(
model_name='internationalhomepage',
name='related_page_two_pt_br',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.Page'),
),
migrations.AddField(
model_name='internationalhomepage',
name='related_page_two_ru',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.Page'),
),
migrations.AddField(
model_name='internationalhomepage',
name='related_page_two_zh_hans',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.Page'),
),
migrations.AddField(
model_name='internationalhomepage',
name='study_in_uk_cta_text_ar',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='study_in_uk_cta_text_de',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='study_in_uk_cta_text_en_gb',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='study_in_uk_cta_text_es',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='study_in_uk_cta_text_fr',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='study_in_uk_cta_text_ja',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='study_in_uk_cta_text_pt',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='study_in_uk_cta_text_pt_br',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='study_in_uk_cta_text_ru',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='study_in_uk_cta_text_zh_hans',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_call_to_action_text_ar',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_call_to_action_text_de',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_call_to_action_text_en_gb',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_call_to_action_text_es',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_call_to_action_text_fr',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_call_to_action_text_ja',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_call_to_action_text_pt',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_call_to_action_text_pt_br',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_call_to_action_text_ru',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_call_to_action_text_zh_hans',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_description_ar',
field=core.model_fields.MarkdownField(null=True, validators=[core.validators.slug_hyperlinks]),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_description_de',
field=core.model_fields.MarkdownField(null=True, validators=[core.validators.slug_hyperlinks]),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_description_en_gb',
field=core.model_fields.MarkdownField(null=True, validators=[core.validators.slug_hyperlinks]),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_description_es',
field=core.model_fields.MarkdownField(null=True, validators=[core.validators.slug_hyperlinks]),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_description_fr',
field=core.model_fields.MarkdownField(null=True, validators=[core.validators.slug_hyperlinks]),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_description_ja',
field=core.model_fields.MarkdownField(null=True, validators=[core.validators.slug_hyperlinks]),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_description_pt',
field=core.model_fields.MarkdownField(null=True, validators=[core.validators.slug_hyperlinks]),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_description_pt_br',
field=core.model_fields.MarkdownField(null=True, validators=[core.validators.slug_hyperlinks]),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_description_ru',
field=core.model_fields.MarkdownField(null=True, validators=[core.validators.slug_hyperlinks]),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_description_zh_hans',
field=core.model_fields.MarkdownField(null=True, validators=[core.validators.slug_hyperlinks]),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_image_ar',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_image_de',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_image_en_gb',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_image_es',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_image_fr',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_image_ja',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_image_pt',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_image_pt_br',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_image_ru',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_image_zh_hans',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_link_ar',
field=models.URLField(null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_link_de',
field=models.URLField(null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_link_en_gb',
field=models.URLField(null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_link_es',
field=models.URLField(null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_link_fr',
field=models.URLField(null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_link_ja',
field=models.URLField(null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_link_pt',
field=models.URLField(null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_link_pt_br',
field=models.URLField(null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_link_ru',
field=models.URLField(null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_link_zh_hans',
field=models.URLField(null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_title_ar',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_title_de',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_title_en_gb',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_title_es',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_title_fr',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_title_ja',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_title_pt',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_title_pt_br',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_title_ru',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='tariffs_title_zh_hans',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='trade_content_ar',
field=core.model_fields.MarkdownField(blank=True, null=True, validators=[core.validators.slug_hyperlinks]),
),
migrations.AddField(
model_name='internationalhomepage',
name='trade_content_de',
field=core.model_fields.MarkdownField(blank=True, null=True, validators=[core.validators.slug_hyperlinks]),
),
migrations.AddField(
model_name='internationalhomepage',
name='trade_content_en_gb',
field=core.model_fields.MarkdownField(blank=True, null=True, validators=[core.validators.slug_hyperlinks]),
),
migrations.AddField(
model_name='internationalhomepage',
name='trade_content_es',
field=core.model_fields.MarkdownField(blank=True, null=True, validators=[core.validators.slug_hyperlinks]),
),
migrations.AddField(
model_name='internationalhomepage',
name='trade_content_fr',
field=core.model_fields.MarkdownField(blank=True, null=True, validators=[core.validators.slug_hyperlinks]),
),
migrations.AddField(
model_name='internationalhomepage',
name='trade_content_ja',
field=core.model_fields.MarkdownField(blank=True, null=True, validators=[core.validators.slug_hyperlinks]),
),
migrations.AddField(
model_name='internationalhomepage',
name='trade_content_pt',
field=core.model_fields.MarkdownField(blank=True, null=True, validators=[core.validators.slug_hyperlinks]),
),
migrations.AddField(
model_name='internationalhomepage',
name='trade_content_pt_br',
field=core.model_fields.MarkdownField(blank=True, null=True, validators=[core.validators.slug_hyperlinks]),
),
migrations.AddField(
model_name='internationalhomepage',
name='trade_content_ru',
field=core.model_fields.MarkdownField(blank=True, null=True, validators=[core.validators.slug_hyperlinks]),
),
migrations.AddField(
model_name='internationalhomepage',
name='trade_content_zh_hans',
field=core.model_fields.MarkdownField(blank=True, null=True, validators=[core.validators.slug_hyperlinks]),
),
migrations.AddField(
model_name='internationalhomepage',
name='trade_image_ar',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='trade_image_de',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='trade_image_en_gb',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='trade_image_es',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='trade_image_fr',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='trade_image_ja',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='trade_image_pt',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='trade_image_pt_br',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='trade_image_ru',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='trade_image_zh_hans',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='internationalhomepage',
name='trade_title_ar',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='trade_title_de',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='trade_title_en_gb',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='trade_title_es',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='trade_title_fr',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='trade_title_ja',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='trade_title_pt',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='trade_title_pt_br',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='trade_title_ru',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='trade_title_zh_hans',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='visit_uk_cta_text_ar',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='visit_uk_cta_text_de',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='visit_uk_cta_text_en_gb',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='visit_uk_cta_text_es',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='visit_uk_cta_text_fr',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='visit_uk_cta_text_ja',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='visit_uk_cta_text_pt',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='visit_uk_cta_text_pt_br',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='visit_uk_cta_text_ru',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='internationalhomepage',
name='visit_uk_cta_text_zh_hans',
field=models.CharField(max_length=255, null=True),
),
]
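Editor's note: the migration above was auto-generated by Django's makemigrations, which is why the same AddField is repeated once per language code. Purely as an illustration of that pattern (not how the file was actually written), the repeated CharField additions could be produced by a helper like the following sketch.

from django.db import migrations, models

LANGUAGE_CODES = ['ar', 'de', 'en_gb', 'es', 'fr', 'ja', 'pt', 'pt_br', 'ru', 'zh_hans']

def add_char_fields(base_name, blank=False):
    """Build one AddField operation per language code for a 255-char CharField."""
    return [
        migrations.AddField(
            model_name='internationalhomepage',
            name='{}_{}'.format(base_name, code),
            field=models.CharField(blank=blank, max_length=255, null=True),
        )
        for code in LANGUAGE_CODES
    ]

# add_char_fields('hero_title') expands to the ten hero_title_* operations above;
# the ForeignKey, MarkdownField and URLField groups follow the same per-language pattern.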
| 44.943044
| 149
| 0.62407
| 4,823
| 48,134
| 5.979681
| 0.020527
| 0.131068
| 0.167476
| 0.196602
| 0.987656
| 0.987656
| 0.98724
| 0.986824
| 0.986026
| 0.983356
| 0
| 0.0096
| 0.259879
| 48,134
| 1,070
| 150
| 44.985047
| 0.799944
| 0.001434
| 0
| 0.790216
| 1
| 0
| 0.201573
| 0.120511
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.004704
| 0
| 0.007526
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 9
| fda739ab5df1d356b2e633019ca5e862975afe3e
| 125
| py
| Python
| serie4/docu/Code/start.py
| Koopakiller/Edu-NLA
| 8376557cab9f74cedd19ee1573a8c71d7e415dd4
| [ "MIT" ] | null | null | null
| serie4/docu/Code/start.py
| Koopakiller/Edu-NLA
| 8376557cab9f74cedd19ee1573a8c71d7e415dd4
| [ "MIT" ] | null | null | null
| serie4/docu/Code/start.py
| Koopakiller/Edu-NLA
| 8376557cab9f74cedd19ee1573a8c71d7e415dd4
| [ "MIT" ] | null | null | null |
def main(file_name="data.txt", n=7):
# Code
pass
if __name__ == "__main__":
main(file_name="data_sym.txt", n=7)
| 17.857143
| 39
| 0.616
| 21
| 125
| 3.142857
| 0.571429
| 0.242424
| 0.363636
| 0.484848
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02
| 0.2
| 125
| 6
| 40
| 20.833333
| 0.64
| 0.032
| 0
| 0
| 0
| 0
| 0.235294
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.25
| 0
| 0
| 0.25
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 7
| e338cf8b18b50f142281acd3fba769c180c7b868
| 9,352
| py
| Python
| tests/test_vector_index_to_coords.py
| lantunes/deepdab
| 0e30f102b9d7c37d3691540496b1649f2704d586
| [ "Apache-2.0" ] | 1
| 2019-04-04T02:26:51.000Z
| 2019-04-04T02:26:51.000Z
| tests/test_vector_index_to_coords.py
| lantunes/deepdab
| 0e30f102b9d7c37d3691540496b1649f2704d586
| [ "Apache-2.0" ] | null | null | null
| tests/test_vector_index_to_coords.py
| lantunes/deepdab
| 0e30f102b9d7c37d3691540496b1649f2704d586
| [ "Apache-2.0" ] | null | null | null |
import unittest
import deepdab
class TestVectorIndexToCoords(unittest.TestCase):
def test_convert_vector_index_to_coordinates_1x1(self):
coordinates = deepdab.convert_vector_index_to_coordinates((1, 1), 0)
self.assertEqual(coordinates, [(0, 1)])
coordinates = deepdab.convert_vector_index_to_coordinates((1, 1), 1)
self.assertEqual(coordinates, [(1, 0)])
coordinates = deepdab.convert_vector_index_to_coordinates((1, 1), 2)
self.assertEqual(coordinates, [(1, 2)])
coordinates = deepdab.convert_vector_index_to_coordinates((1, 1), 3)
self.assertEqual(coordinates, [(2, 1)])
def test_convert_vector_index_to_coordinates_1x1_include_dots(self):
coordinates = deepdab.convert_vector_index_to_coordinates((1, 1), 0, include_dots=True)
self.assertEqual(coordinates, [(0, 0), (0, 1), (0, 2)])
coordinates = deepdab.convert_vector_index_to_coordinates((1, 1), 1, include_dots=True)
self.assertEqual(coordinates, [(0, 0), (1, 0), (2, 0)])
coordinates = deepdab.convert_vector_index_to_coordinates((1, 1), 2, include_dots=True)
self.assertEqual(coordinates, [(0, 2), (1, 2), (2, 2)])
coordinates = deepdab.convert_vector_index_to_coordinates((1, 1), 3, include_dots=True)
self.assertEqual(coordinates, [(2, 0), (2, 1), (2, 2)])
def test_convert_vector_index_to_coordinates_1x1_edge_length(self):
coordinates = deepdab.convert_vector_index_to_coordinates((1, 1), 0, edge_length=2)
self.assertEqual(coordinates, [(0, 1), (0, 2)])
coordinates = deepdab.convert_vector_index_to_coordinates((1, 1), 1, edge_length=2)
self.assertEqual(coordinates, [(1, 0), (2, 0)])
coordinates = deepdab.convert_vector_index_to_coordinates((1, 1), 2, edge_length=2)
self.assertEqual(coordinates, [(1, 3), (2, 3)])
coordinates = deepdab.convert_vector_index_to_coordinates((1, 1), 3, edge_length=2)
self.assertEqual(coordinates, [(3, 1), (3, 2)])
def test_convert_vector_index_to_coordinates_1x1_edge_length_include_dots(self):
coordinates = deepdab.convert_vector_index_to_coordinates((1, 1), 0, edge_length=2, include_dots=True)
self.assertEqual(coordinates, [(0, 0), (0, 1), (0, 2), (0, 3)])
coordinates = deepdab.convert_vector_index_to_coordinates((1, 1), 1, edge_length=2, include_dots=True)
self.assertEqual(coordinates, [(0, 0), (1, 0), (2, 0), (3, 0)])
coordinates = deepdab.convert_vector_index_to_coordinates((1, 1), 2, edge_length=2, include_dots=True)
self.assertEqual(coordinates, [(0, 3), (1, 3), (2, 3), (3, 3)])
coordinates = deepdab.convert_vector_index_to_coordinates((1, 1), 3, edge_length=2, include_dots=True)
self.assertEqual(coordinates, [(3, 0), (3, 1), (3, 2), (3, 3)])
def test_convert_vector_index_to_coordinates_1x2(self):
coordinates = deepdab.convert_vector_index_to_coordinates((1, 2), 0)
self.assertEqual(coordinates, [(0, 1)])
coordinates = deepdab.convert_vector_index_to_coordinates((1, 2), 1)
self.assertEqual(coordinates, [(0, 3)])
coordinates = deepdab.convert_vector_index_to_coordinates((1, 2), 2)
self.assertEqual(coordinates, [(1, 0)])
coordinates = deepdab.convert_vector_index_to_coordinates((1, 2), 3)
self.assertEqual(coordinates, [(1, 2)])
coordinates = deepdab.convert_vector_index_to_coordinates((1, 2), 4)
self.assertEqual(coordinates, [(1, 4)])
coordinates = deepdab.convert_vector_index_to_coordinates((1, 2), 5)
self.assertEqual(coordinates, [(2, 1)])
coordinates = deepdab.convert_vector_index_to_coordinates((1, 2), 6)
self.assertEqual(coordinates, [(2, 3)])
def test_convert_vector_index_to_coordinates_2x1(self):
coordinates = deepdab.convert_vector_index_to_coordinates((2, 1), 0)
self.assertEqual(coordinates, [(0, 1)])
coordinates = deepdab.convert_vector_index_to_coordinates((2, 1), 1)
self.assertEqual(coordinates, [(1, 0)])
coordinates = deepdab.convert_vector_index_to_coordinates((2, 1), 2)
self.assertEqual(coordinates, [(1, 2)])
coordinates = deepdab.convert_vector_index_to_coordinates((2, 1), 3)
self.assertEqual(coordinates, [(2, 1)])
coordinates = deepdab.convert_vector_index_to_coordinates((2, 1), 4)
self.assertEqual(coordinates, [(3, 0)])
coordinates = deepdab.convert_vector_index_to_coordinates((2, 1), 5)
self.assertEqual(coordinates, [(3, 2)])
coordinates = deepdab.convert_vector_index_to_coordinates((2, 1), 6)
self.assertEqual(coordinates, [(4, 1)])
def test_convert_vector_index_to_coordinates_2x2(self):
coordinates = deepdab.convert_vector_index_to_coordinates((2, 2), 0)
self.assertEqual(coordinates, [(0, 1)])
coordinates = deepdab.convert_vector_index_to_coordinates((2, 2), 1)
self.assertEqual(coordinates, [(0, 3)])
coordinates = deepdab.convert_vector_index_to_coordinates((2, 2), 2)
self.assertEqual(coordinates, [(1, 0)])
coordinates = deepdab.convert_vector_index_to_coordinates((2, 2), 3)
self.assertEqual(coordinates, [(1, 2)])
coordinates = deepdab.convert_vector_index_to_coordinates((2, 2), 4)
self.assertEqual(coordinates, [(1, 4)])
coordinates = deepdab.convert_vector_index_to_coordinates((2, 2), 5)
self.assertEqual(coordinates, [(2, 1)])
coordinates = deepdab.convert_vector_index_to_coordinates((2, 2), 6)
self.assertEqual(coordinates, [(2, 3)])
coordinates = deepdab.convert_vector_index_to_coordinates((2, 2), 7)
self.assertEqual(coordinates, [(3, 0)])
coordinates = deepdab.convert_vector_index_to_coordinates((2, 2), 8)
self.assertEqual(coordinates, [(3, 2)])
coordinates = deepdab.convert_vector_index_to_coordinates((2, 2), 9)
self.assertEqual(coordinates, [(3, 4)])
coordinates = deepdab.convert_vector_index_to_coordinates((2, 2), 10)
self.assertEqual(coordinates, [(4, 1)])
coordinates = deepdab.convert_vector_index_to_coordinates((2, 2), 11)
self.assertEqual(coordinates, [(4, 3)])
def test_convert_vector_index_to_coordinates_3x3(self):
coordinates = deepdab.convert_vector_index_to_coordinates((3, 3), 0)
self.assertEqual(coordinates, [(0, 1)])
coordinates = deepdab.convert_vector_index_to_coordinates((3, 3), 1)
self.assertEqual(coordinates, [(0, 3)])
coordinates = deepdab.convert_vector_index_to_coordinates((3, 3), 2)
self.assertEqual(coordinates, [(0, 5)])
coordinates = deepdab.convert_vector_index_to_coordinates((3, 3), 3)
self.assertEqual(coordinates, [(1, 0)])
coordinates = deepdab.convert_vector_index_to_coordinates((3, 3), 4)
self.assertEqual(coordinates, [(1, 2)])
coordinates = deepdab.convert_vector_index_to_coordinates((3, 3), 5)
self.assertEqual(coordinates, [(1, 4)])
coordinates = deepdab.convert_vector_index_to_coordinates((3, 3), 6)
self.assertEqual(coordinates, [(1, 6)])
coordinates = deepdab.convert_vector_index_to_coordinates((3, 3), 7)
self.assertEqual(coordinates, [(2, 1)])
coordinates = deepdab.convert_vector_index_to_coordinates((3, 3), 8)
self.assertEqual(coordinates, [(2, 3)])
coordinates = deepdab.convert_vector_index_to_coordinates((3, 3), 9)
self.assertEqual(coordinates, [(2, 5)])
coordinates = deepdab.convert_vector_index_to_coordinates((3, 3), 10)
self.assertEqual(coordinates, [(3, 0)])
coordinates = deepdab.convert_vector_index_to_coordinates((3, 3), 11)
self.assertEqual(coordinates, [(3, 2)])
coordinates = deepdab.convert_vector_index_to_coordinates((3, 3), 12)
self.assertEqual(coordinates, [(3, 4)])
coordinates = deepdab.convert_vector_index_to_coordinates((3, 3), 13)
self.assertEqual(coordinates, [(3, 6)])
coordinates = deepdab.convert_vector_index_to_coordinates((3, 3), 14)
self.assertEqual(coordinates, [(4, 1)])
coordinates = deepdab.convert_vector_index_to_coordinates((3, 3), 15)
self.assertEqual(coordinates, [(4, 3)])
coordinates = deepdab.convert_vector_index_to_coordinates((3, 3), 16)
self.assertEqual(coordinates, [(4, 5)])
coordinates = deepdab.convert_vector_index_to_coordinates((3, 3), 17)
self.assertEqual(coordinates, [(5, 0)])
coordinates = deepdab.convert_vector_index_to_coordinates((3, 3), 18)
self.assertEqual(coordinates, [(5, 2)])
coordinates = deepdab.convert_vector_index_to_coordinates((3, 3), 19)
self.assertEqual(coordinates, [(5, 4)])
coordinates = deepdab.convert_vector_index_to_coordinates((3, 3), 20)
self.assertEqual(coordinates, [(5, 6)])
coordinates = deepdab.convert_vector_index_to_coordinates((3, 3), 21)
self.assertEqual(coordinates, [(6, 1)])
coordinates = deepdab.convert_vector_index_to_coordinates((3, 3), 22)
self.assertEqual(coordinates, [(6, 3)])
coordinates = deepdab.convert_vector_index_to_coordinates((3, 3), 23)
self.assertEqual(coordinates, [(6, 5)])
| 60.335484
| 110
| 0.680175
| 1,174
| 9,352
| 5.132027
| 0.041738
| 0.159668
| 0.221079
| 0.245643
| 0.928797
| 0.90888
| 0.893444
| 0.873195
| 0.825892
| 0.783237
| 0
| 0.054719
| 0.183169
| 9,352
| 154
| 111
| 60.727273
| 0.733997
| 0
| 0
| 0.293706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.461538
| 1
| 0.055944
| false
| 0
| 0.013986
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 9
| e374d42bd89560a9bbec64d855527670cd850c1d
| 60
| py
| Python
| src/utils/Testing_Folder/Fish/Aliens/AlphaCentauri.py
| jonoddram/CodetagCrawler
| 6b6eaed738aef91fead3d2aaf3ab96e1cecd0cdf
| [ "MIT" ] | null | null | null
| src/utils/Testing_Folder/Fish/Aliens/AlphaCentauri.py
| jonoddram/CodetagCrawler
| 6b6eaed738aef91fead3d2aaf3ab96e1cecd0cdf
| [ "MIT" ] | null | null | null
| src/utils/Testing_Folder/Fish/Aliens/AlphaCentauri.py
| jonoddram/CodetagCrawler
| 6b6eaed738aef91fead3d2aaf3ab96e1cecd0cdf
| [ "MIT" ] | null | null | null |
# EXTERMINATE EXTERMINATE EXTERMINATE
# TODOCTOR EXTERMINATE
| 30
| 37
| 0.866667
| 5
| 60
| 10.4
| 0.4
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 60
| 2
| 38
| 30
| 0.962963
| 0.933333
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0.5
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| e375ecb9ac50ceae4b4b6fe0f78e748efdf6224a
| 88
| py
| Python
| src/utils/criterion_functions.py
| Hannemit/dog_breed_classifier
| 3e9cc6abe8a4dcb91eebcd4aaacaa04502fe9682
| [ "MIT" ] | null | null | null
| src/utils/criterion_functions.py
| Hannemit/dog_breed_classifier
| 3e9cc6abe8a4dcb91eebcd4aaacaa04502fe9682
| [ "MIT" ] | null | null | null
| src/utils/criterion_functions.py
| Hannemit/dog_breed_classifier
| 3e9cc6abe8a4dcb91eebcd4aaacaa04502fe9682
| [ "MIT" ] | null | null | null |
import torch.nn as nn
def cross_entropy_loss():
return nn.CrossEntropyLoss()
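Editor's note: a minimal usage sketch for the factory above; the tensor shapes and class count are illustrative, not taken from the repository. nn.CrossEntropyLoss expects raw logits of shape (batch, num_classes) and integer class targets of shape (batch,).

import torch
import torch.nn as nn

def cross_entropy_loss():
    return nn.CrossEntropyLoss()

criterion = cross_entropy_loss()
logits = torch.randn(4, 133, requires_grad=True)  # stands in for model output; 133 classes is illustrative
targets = torch.tensor([3, 7, 21, 0])             # ground-truth class indices
loss = criterion(logits, targets)
loss.backward()                                   # populates logits.grad in this toy example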
| 14.666667
| 33
| 0.704545
| 12
| 88
| 5
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.215909
| 88
| 5
| 34
| 17.6
| 0.869565
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 7
| 8b635f8990663175b0b492d20f35501c7f499db6
| 154
| py
| Python
| RoIlayer/__init__.py
| doanthuyennt/DRRG
| 99690dcde3ffb75d6ed5fda61a4078075376a390
| [ "MIT" ] | 227
| 2020-03-09T03:07:42.000Z
| 2022-03-12T06:09:06.000Z
| RoIlayer/__init__.py
| doanthuyennt/DRRG
| 99690dcde3ffb75d6ed5fda61a4078075376a390
| [ "MIT" ] | 28
| 2020-09-01T02:19:05.000Z
| 2022-01-11T11:51:21.000Z
| RoIlayer/__init__.py
| doanthuyennt/DRRG
| 99690dcde3ffb75d6ed5fda61a4078075376a390
| [ "MIT" ] | 50
| 2020-03-18T01:58:51.000Z
| 2022-03-27T08:50:31.000Z |
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import torch
from .rroi_align import RROIAlign
from .rroi_align import rroi_align
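# Note (added): this __init__ simply re-exports the rotated RoI align bindings so callers
# can write, e.g., ``from RoIlayer import RROIAlign`` instead of reaching into the
# submodule. That RROIAlign is the module-style wrapper and rroi_align the functional
# form is an assumption based on the naming convention, not something this file states.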
| 30.8
| 71
| 0.805195
| 23
| 154
| 5.26087
| 0.695652
| 0.223141
| 0.214876
| 0.31405
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 154
| 5
| 72
| 30.8
| 0.909774
| 0.448052
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
8bb66a2e95e70b252a05ec2613305d46f203a942
| 41,267
|
py
|
Python
|
app/presenter/tests/tests_views_json_ViewGitFeed_commit.py
|
imvu/bluesteel
|
ab52133249a693b3cd2d8593c5d47408a3b0fce6
|
[
"MIT"
] | 10
|
2017-01-13T06:28:04.000Z
|
2020-11-18T13:00:26.000Z
|
app/presenter/tests/tests_views_json_ViewGitFeed_commit.py
|
imvu/bluesteel
|
ab52133249a693b3cd2d8593c5d47408a3b0fce6
|
[
"MIT"
] | null | null | null |
app/presenter/tests/tests_views_json_ViewGitFeed_commit.py
|
imvu/bluesteel
|
ab52133249a693b3cd2d8593c5d47408a3b0fce6
|
[
"MIT"
] | 2
|
2018-03-29T14:10:53.000Z
|
2019-11-20T08:21:57.000Z
|
""" Git Feed Views commit tests """
from django.test import TestCase
from django.test import Client
from django.conf import settings
from django.utils import timezone
from django.contrib.auth.models import User
from app.logic.gitrepo.models.GitProjectModel import GitProjectEntry
from app.logic.gitrepo.models.GitUserModel import GitUserEntry
from app.logic.gitrepo.models.GitCommitModel import GitCommitEntry
from app.logic.gitrepo.models.GitParentModel import GitParentEntry
from app.logic.gitrepo.models.GitDiffModel import GitDiffEntry
from app.logic.gitrepo.models.GitBranchModel import GitBranchEntry
from app.logic.gitrepo.models.GitBranchTrailModel import GitBranchTrailEntry
from app.logic.gitfeeder.helper import FeederTestHelper
from app.logic.gitfeeder.models.FeedModel import FeedEntry
from app.logic.commandrepo.models.CommandGroupModel import CommandGroupEntry
from app.logic.commandrepo.models.CommandSetModel import CommandSetEntry
from app.logic.commandrepo.models.CommandModel import CommandEntry
from app.logic.commandrepo.models.CommandResultModel import CommandResultEntry
from app.logic.benchmark.models.BenchmarkDefinitionModel import BenchmarkDefinitionEntry
from app.logic.benchmark.models.BenchmarkExecutionModel import BenchmarkExecutionEntry
from app.logic.benchmark.models.BenchmarkFluctuationWaiverModel import BenchmarkFluctuationWaiverEntry
from app.logic.bluesteel.models.BluesteelLayoutModel import BluesteelLayoutEntry
from app.logic.bluesteel.models.BluesteelProjectModel import BluesteelProjectEntry
from app.logic.bluesteelworker.models.WorkerModel import WorkerEntry
from app.logic.mailing.models.StackedMailModel import StackedMailEntry
from app.logic.httpcommon import res
from datetime import timedelta
import json
import os
import hashlib
import shutil
class GitFeedViewsCommitTestCase(TestCase):
def setUp(self):
self.client = Client()
self.user1 = User.objects.create_user('user1@test.com', 'user1@test.com', 'pass1')
self.user1.save()
self.user2 = User.objects.create_user('user2@test.com', 'user2@test.com', 'pass2')
self.user2.save()
self.git_project1 = GitProjectEntry.objects.create(url='http://test/')
self.git_user1 = GitUserEntry.objects.create(
project=self.git_project1,
name='user1',
email='user1@test.com'
)
def tearDown(self):
pass
def test_feed_simple_commit(self):
commit_time = str(timezone.now().isoformat())
commit1 = FeederTestHelper.create_commit(1, [], 'user1', 'user1@test.com', commit_time, commit_time)
branch1 = FeederTestHelper.create_branch('master', 1, 'master', 1, 1, [1], 'merge-target-content')
feed_data = {}
feed_data['commits'] = []
feed_data['commits'].append(commit1)
feed_data['branches'] = []
feed_data['branches'].append(branch1)
post_data = FeederTestHelper.create_feed_data(feed_data)
resp = self.client.post(
'/main/feed/commit/project/{0}/'.format(self.git_project1.id),
data = json.dumps(post_data),
content_type='application/json')
res.check_cross_origin_headers(self, resp)
resp_obj = json.loads(resp.content)
self.assertEqual(200, resp_obj['status'])
self.assertEqual(1, GitCommitEntry.objects.all().count())
self.assertEqual(1, GitBranchEntry.objects.all().count())
branch_entry = GitBranchEntry.objects.all().first()
self.assertEqual('0000100001000010000100001000010000100001', branch_entry.commit.commit_hash)
self.assertEqual('master', branch_entry.name)
def test_feed_reports_have_correct_user_assign(self):
post_data = {}
post_data['reports'] = FeederTestHelper.get_default_report(1)
self.client.login(username='user1@test.com', password='pass1')
resp = self.client.post(
'/main/feed/report/project/{0}/'.format(self.git_project1.id),
data = json.dumps(post_data),
content_type='application/json')
res.check_cross_origin_headers(self, resp)
resp_obj = json.loads(resp.content)
self.assertEqual(200, resp_obj['status'])
self.assertEqual(1, CommandGroupEntry.objects.filter(user=self.user1).count())
def test_feed_simple_commit_already_present(self):
git_commit1 = GitCommitEntry.objects.create(
project=self.git_project1,
commit_hash='0000100001000010000100001000010000100001',
author=self.git_user1,
author_date=timezone.now(),
committer=self.git_user1,
committer_date=timezone.now()
)
commit_time = str(timezone.now().isoformat())
commit1 = FeederTestHelper.create_commit(1, [], 'user1', 'user1@test.com', commit_time, commit_time)
branch1 = FeederTestHelper.create_branch('master', 1, 'master', 1, 1, [1], 'merge-target-content')
feed_data = {}
feed_data['commits'] = []
feed_data['commits'].append(commit1)
feed_data['branches'] = []
feed_data['branches'].append(branch1)
post_data = FeederTestHelper.create_feed_data(feed_data)
resp = self.client.post(
'/main/feed/commit/project/{0}/'.format(self.git_project1.id),
data = json.dumps(post_data),
content_type='application/json')
res.check_cross_origin_headers(self, resp)
resp_obj = json.loads(resp.content)
self.assertEqual(200, resp_obj['status'])
self.assertEqual(1, GitCommitEntry.objects.all().count())
self.assertEqual(1, GitBranchEntry.objects.all().count())
branch_entry = GitBranchEntry.objects.all().first()
self.assertEqual('0000100001000010000100001000010000100001', branch_entry.commit.commit_hash)
self.assertEqual('master', branch_entry.name)
def test_feed_fail_because_commits_are_not_unique(self):
commit_time = str(timezone.now().isoformat())
commit1 = FeederTestHelper.create_commit(1, [], 'user1', 'user1@test.com', commit_time, commit_time)
commit2 = FeederTestHelper.create_commit(1, [], 'user1', 'user1@test.com', commit_time, commit_time)
commit3 = FeederTestHelper.create_commit(2, [], 'user1', 'user1@test.com', commit_time, commit_time)
commit4 = FeederTestHelper.create_commit(3, [], 'user1', 'user1@test.com', commit_time, commit_time)
branch1 = FeederTestHelper.create_branch('master', 3, 'master', 3, 1, [3, 1], 'merge-target-content')
feed_data = {}
feed_data['commits'] = []
feed_data['commits'].append(commit1)
feed_data['commits'].append(commit2)
feed_data['commits'].append(commit3)
feed_data['commits'].append(commit4)
feed_data['branches'] = []
feed_data['branches'].append(branch1)
post_data = FeederTestHelper.create_feed_data(feed_data)
resp = self.client.post(
'/main/feed/commit/project/{0}/'.format(self.git_project1.id),
data = json.dumps(post_data),
content_type='application/json')
res.check_cross_origin_headers(self, resp)
resp_obj = json.loads(resp.content)
self.assertEqual(400, resp_obj['status'])
self.assertEqual('Commits not correct', resp_obj['message'])
self.assertEqual(0, GitCommitEntry.objects.all().count())
self.assertEqual(0, GitBranchEntry.objects.all().count())
def test_two_commits_one_parent(self):
commit_time = str(timezone.now().isoformat())
commit1 = FeederTestHelper.create_commit(1, [], 'user1', 'user1@test.com', commit_time, commit_time)
commit2 = FeederTestHelper.create_commit(2, [], 'user1', 'user1@test.com', commit_time, commit_time)
commit1['parent_hashes'].append(commit2['hash'])
branch1 = FeederTestHelper.create_branch('master', 1, 'master', 1, 1, [1], 'merge-target-content')
feed_data = {}
feed_data['commits'] = []
feed_data['commits'].append(commit1)
feed_data['commits'].append(commit2)
feed_data['branches'] = []
feed_data['branches'].append(branch1)
post_data = FeederTestHelper.create_feed_data(feed_data)
resp = self.client.post(
'/main/feed/commit/project/{0}/'.format(self.git_project1.id),
data = json.dumps(post_data),
content_type='application/json')
res.check_cross_origin_headers(self, resp)
resp_obj = json.loads(resp.content)
self.assertEqual(200, resp_obj['status'])
self.assertEqual(2, GitCommitEntry.objects.all().count())
self.assertEqual(1, GitParentEntry.objects.all().count())
self.assertEqual(1, GitBranchEntry.objects.all().count())
parent_entry = GitParentEntry.objects.all().first()
self.assertEqual('0000200002000020000200002000020000200002', parent_entry.parent.commit_hash)
self.assertEqual('0000100001000010000100001000010000100001', parent_entry.son.commit_hash)
branch_entry = GitBranchEntry.objects.all().first()
self.assertEqual('0000100001000010000100001000010000100001', branch_entry.commit.commit_hash)
self.assertEqual('master', branch_entry.name)
def test_feed_fails_because_bad_constructed_parent(self):
commit_time = str(timezone.now().isoformat())
commit1 = FeederTestHelper.create_commit(1, [], 'user1', 'user1@test.com', commit_time, commit_time)
commit2 = FeederTestHelper.create_commit(2, [3], 'user1', 'user1@test.com', commit_time, commit_time)
branch1 = FeederTestHelper.create_branch('master', 2, 'master', 2, 1, [2, 1], 'merge-target-content')
feed_data = {}
feed_data['commits'] = []
feed_data['commits'].append(commit1)
feed_data['commits'].append(commit2)
feed_data['branches'] = []
feed_data['branches'].append(branch1)
post_data = FeederTestHelper.create_feed_data(feed_data)
resp = self.client.post(
'/main/feed/commit/project/{0}/'.format(self.git_project1.id),
data = json.dumps(post_data),
content_type='application/json')
res.check_cross_origin_headers(self, resp)
resp_obj = json.loads(resp.content)
self.assertEqual(400, resp_obj['status'])
self.assertEqual('Parents not correct', resp_obj['message'])
def test_feed_only_care_about_the_first_parent_of_every_commit(self):
commit_time = str(timezone.now().isoformat())
commit1 = FeederTestHelper.create_commit(1, [], 'user1', 'user1@test.com', commit_time, commit_time)
commit2 = FeederTestHelper.create_commit(2, [1, 3], 'user1', 'user1@test.com', commit_time, commit_time)
branch1 = FeederTestHelper.create_branch('master', 2, 'master', 2, 1, [2, 1], 'merge-target-content')
feed_data = {}
feed_data['commits'] = []
feed_data['commits'].append(commit1)
feed_data['commits'].append(commit2)
feed_data['branches'] = []
feed_data['branches'].append(branch1)
post_data = FeederTestHelper.create_feed_data(feed_data)
resp = self.client.post(
'/main/feed/commit/project/{0}/'.format(self.git_project1.id),
data = json.dumps(post_data),
content_type='application/json')
res.check_cross_origin_headers(self, resp)
resp_obj = json.loads(resp.content)
self.assertEqual(200, resp_obj['status'])
self.assertEqual('Commits added correctly', resp_obj['message'])
def test_feed_two_times(self):
commit_time = str(timezone.now().isoformat())
commit1 = FeederTestHelper.create_commit(1, [], 'user1', 'user1@test.com', commit_time, commit_time)
commit2 = FeederTestHelper.create_commit(2, [1], 'user1', 'user1@test.com', commit_time, commit_time)
commit3 = FeederTestHelper.create_commit(3, [1], 'user1', 'user1@test.com', commit_time, commit_time)
branch1 = FeederTestHelper.create_branch('master', 2, 'master', 2, 1, [2, 1], 'merge-target-content')
branch2 = FeederTestHelper.create_branch('branch-1', 3, 'master', 2, 1, [3, 1], 'merge-target-content-2')
feed_data = {}
feed_data['commits'] = []
feed_data['commits'].append(commit1)
feed_data['commits'].append(commit2)
feed_data['commits'].append(commit3)
feed_data['branches'] = []
feed_data['branches'].append(branch1)
feed_data['branches'].append(branch2)
post_data = FeederTestHelper.create_feed_data(feed_data)
resp = self.client.post(
'/main/feed/commit/project/{0}/'.format(self.git_project1.id),
data = json.dumps(post_data),
content_type='application/json')
res.check_cross_origin_headers(self, resp)
resp_obj = json.loads(resp.content)
self.assertEqual(200, resp_obj['status'])
commit4 = FeederTestHelper.create_commit(4, [2], 'user1', 'user1@test.com', commit_time, commit_time)
commit5 = FeederTestHelper.create_commit(5, [3], 'user1', 'user1@test.com', commit_time, commit_time)
branch1 = FeederTestHelper.create_branch('master', 4, 'master', 4, 1, [4, 2, 1], 'merge-target-content-3')
branch2 = FeederTestHelper.create_branch('branch-1', 5, 'master', 4, 1, [5, 3, 1], 'merge-target-content-4')
feed_data = {}
feed_data['commits'] = []
feed_data['commits'].append(commit4)
feed_data['commits'].append(commit5)
feed_data['branches'] = []
feed_data['branches'].append(branch1)
feed_data['branches'].append(branch2)
post_data = FeederTestHelper.create_feed_data(feed_data)
resp = self.client.post(
'/main/feed/commit/project/{0}/'.format(self.git_project1.id),
data = json.dumps(post_data),
content_type='application/json')
res.check_cross_origin_headers(self, resp)
resp_obj = json.loads(resp.content)
self.assertEqual(200, resp_obj['status'])
self.assertEqual(5, GitCommitEntry.objects.all().count())
self.assertEqual(4, GitParentEntry.objects.all().count())
self.assertEqual(2, GitBranchEntry.objects.all().count())
self.assertEqual(6, GitBranchTrailEntry.objects.all().count())
self.assertIsNotNone(GitCommitEntry.objects.filter(commit_hash='0000100001000010000100001000010000100001').first())
self.assertIsNotNone(GitCommitEntry.objects.filter(commit_hash='0000200002000020000200002000020000200002').first())
self.assertIsNotNone(GitCommitEntry.objects.filter(commit_hash='0000300003000030000300003000030000300003').first())
self.assertIsNotNone(GitCommitEntry.objects.filter(commit_hash='0000400004000040000400004000040000400004').first())
self.assertIsNotNone(GitCommitEntry.objects.filter(commit_hash='0000500005000050000500005000050000500005').first())
self.assertIsNotNone(GitParentEntry.objects.filter(parent__commit_hash='0000100001000010000100001000010000100001', son__commit_hash='0000200002000020000200002000020000200002').first())
self.assertIsNotNone(GitParentEntry.objects.filter(parent__commit_hash='0000100001000010000100001000010000100001', son__commit_hash='0000300003000030000300003000030000300003').first())
self.assertIsNotNone(GitParentEntry.objects.filter(parent__commit_hash='0000200002000020000200002000020000200002', son__commit_hash='0000400004000040000400004000040000400004').first())
self.assertIsNotNone(GitParentEntry.objects.filter(parent__commit_hash='0000300003000030000300003000030000300003', son__commit_hash='0000500005000050000500005000050000500005').first())
self.assertIsNotNone(GitBranchEntry.objects.filter(name='master').first())
self.assertIsNotNone(GitBranchEntry.objects.filter(name='branch-1').first())
self.assertIsNotNone(GitBranchTrailEntry.objects.filter(commit__commit_hash='0000100001000010000100001000010000100001', branch__name='master').first())
self.assertIsNotNone(GitBranchTrailEntry.objects.filter(commit__commit_hash='0000200002000020000200002000020000200002', branch__name='master').first())
self.assertIsNotNone(GitBranchTrailEntry.objects.filter(commit__commit_hash='0000400004000040000400004000040000400004', branch__name='master').first())
self.assertIsNotNone(GitBranchTrailEntry.objects.filter(commit__commit_hash='0000100001000010000100001000010000100001', branch__name='branch-1').first())
self.assertIsNotNone(GitBranchTrailEntry.objects.filter(commit__commit_hash='0000300003000030000300003000030000300003', branch__name='branch-1').first())
self.assertIsNotNone(GitBranchTrailEntry.objects.filter(commit__commit_hash='0000500005000050000500005000050000500005', branch__name='branch-1').first())
def test_feed_two_times_populate_fluctuation_waivers(self):
commit_time = str(timezone.now().isoformat())
commit1 = FeederTestHelper.create_commit(1, [], 'user1', 'user1@test.com', commit_time, commit_time)
commit2 = FeederTestHelper.create_commit(2, [1], 'user1', 'user1@test.com', commit_time, commit_time)
commit3 = FeederTestHelper.create_commit(3, [1], 'user2', 'user2@test.com', commit_time, commit_time)
branch1 = FeederTestHelper.create_branch('master', 2, 'master', 2, 1, [2, 1], 'merge-target-content')
branch2 = FeederTestHelper.create_branch('branch-1', 3, 'master', 2, 1, [3, 1], 'merge-target-content-2')
feed_data = {}
feed_data['commits'] = []
feed_data['commits'].append(commit1)
feed_data['commits'].append(commit2)
feed_data['commits'].append(commit3)
feed_data['branches'] = []
feed_data['branches'].append(branch1)
feed_data['branches'].append(branch2)
post_data = FeederTestHelper.create_feed_data(feed_data)
resp = self.client.post(
'/main/feed/commit/project/{0}/'.format(self.git_project1.id),
data = json.dumps(post_data),
content_type='application/json')
res.check_cross_origin_headers(self, resp)
resp_obj = json.loads(resp.content)
self.assertEqual(200, resp_obj['status'])
self.assertEqual(2, BenchmarkFluctuationWaiverEntry.objects.all().count())
self.assertEqual(1, BenchmarkFluctuationWaiverEntry.objects.filter(git_user__name='user1').count())
self.assertEqual(1, BenchmarkFluctuationWaiverEntry.objects.filter(git_user__name='user2').count())
commit4 = FeederTestHelper.create_commit(4, [2], 'user1', 'user1@test.com', commit_time, commit_time)
commit5 = FeederTestHelper.create_commit(5, [3], 'user3', 'user3@test.com', commit_time, commit_time)
branch1 = FeederTestHelper.create_branch('master', 4, 'master', 4, 1, [4, 2, 1], 'merge-target-content-3')
branch2 = FeederTestHelper.create_branch('branch-1', 5, 'master', 4, 1, [5, 3, 1], 'merge-target-content-4')
feed_data = {}
feed_data['commits'] = []
feed_data['commits'].append(commit4)
feed_data['commits'].append(commit5)
feed_data['branches'] = []
feed_data['branches'].append(branch1)
feed_data['branches'].append(branch2)
post_data = FeederTestHelper.create_feed_data(feed_data)
resp = self.client.post(
'/main/feed/commit/project/{0}/'.format(self.git_project1.id),
data = json.dumps(post_data),
content_type='application/json')
res.check_cross_origin_headers(self, resp)
resp_obj = json.loads(resp.content)
self.assertEqual(200, resp_obj['status'])
self.assertEqual(3, BenchmarkFluctuationWaiverEntry.objects.all().count())
self.assertEqual(1, BenchmarkFluctuationWaiverEntry.objects.filter(git_user__name='user1').count())
self.assertEqual(1, BenchmarkFluctuationWaiverEntry.objects.filter(git_user__name='user2').count())
self.assertEqual(1, BenchmarkFluctuationWaiverEntry.objects.filter(git_user__name='user3').count())
def test_feed_commit_create_benchmark_execution_because_benchmark_definition(self):
command_group = CommandGroupEntry.objects.create()
command_set = CommandSetEntry.objects.create(
group=command_group
)
bluesteel_layout = BluesteelLayoutEntry.objects.create(
name='Layout',
active=True,
project_index_path=0,
)
bluesteel_project = BluesteelProjectEntry.objects.create(
name='Project',
order=0,
layout=bluesteel_layout,
command_group=command_group,
git_project=self.git_project1,
)
benchmark_definition1 = BenchmarkDefinitionEntry.objects.create(
name='BenchmarkDefinition1',
layout=bluesteel_layout,
project=bluesteel_project,
command_set=command_set,
revision=28,
)
benchmark_definition2 = BenchmarkDefinitionEntry.objects.create(
name='BenchmarkDefinition2',
layout=bluesteel_layout,
project=bluesteel_project,
command_set=command_set,
revision=3,
)
worker1 = WorkerEntry.objects.create(
name='worker-name-1',
uuid='uuid-worker-1',
operative_system='osx',
description='long-description-1',
user=self.user1,
git_feeder=False
)
worker2 = WorkerEntry.objects.create(
name='worker-name-2',
uuid='uuid-worker-2',
operative_system='osx',
description='long-description-2',
user=self.user2,
git_feeder=False
)
commit_time = str(timezone.now().isoformat())
commit1 = FeederTestHelper.create_commit(1, [], 'user1', 'user1@test.com', commit_time, commit_time)
branch1 = FeederTestHelper.create_branch('master', 1, 'master', 1, 1, [1], 'merge-target-content')
feed_data = {}
feed_data['commits'] = []
feed_data['commits'].append(commit1)
feed_data['branches'] = []
feed_data['branches'].append(branch1)
post_data = FeederTestHelper.create_feed_data(feed_data)
self.assertEqual(0, BenchmarkExecutionEntry.objects.all().count())
resp = self.client.post(
'/main/feed/commit/project/{0}/'.format(self.git_project1.id),
data = json.dumps(post_data),
content_type='application/json')
res.check_cross_origin_headers(self, resp)
resp_obj = json.loads(resp.content)
self.assertEqual(200, resp_obj['status'])
self.assertEqual(1, GitCommitEntry.objects.all().count())
self.assertEqual(1, GitBranchEntry.objects.all().count())
branch_entry = GitBranchEntry.objects.all().first()
self.assertEqual('0000100001000010000100001000010000100001', branch_entry.commit.commit_hash)
self.assertEqual('master', branch_entry.name)
self.assertEqual(4, BenchmarkExecutionEntry.objects.all().count())
self.assertEqual(1, BenchmarkExecutionEntry.objects.filter(commit__commit_hash='0000100001000010000100001000010000100001', worker=worker1, definition=benchmark_definition1).count())
self.assertEqual(1, BenchmarkExecutionEntry.objects.filter(commit__commit_hash='0000100001000010000100001000010000100001', worker=worker1, definition=benchmark_definition2).count())
self.assertEqual(1, BenchmarkExecutionEntry.objects.filter(commit__commit_hash='0000100001000010000100001000010000100001', worker=worker2, definition=benchmark_definition1).count())
self.assertEqual(1, BenchmarkExecutionEntry.objects.filter(commit__commit_hash='0000100001000010000100001000010000100001', worker=worker2, definition=benchmark_definition2).count())
def test_view_json_purge_all_feed_reports_from_a_worker(self):
git_project2 = GitProjectEntry.objects.create(url='http://test/2/')
worker1 = WorkerEntry.objects.create(
name='worker-name-1',
uuid='uuid-worker-1',
operative_system='osx',
description='long-description-1',
user=self.user1,
git_feeder=False
)
command_group_1 = CommandGroupEntry.objects.create()
command_group_2 = CommandGroupEntry.objects.create()
command_group_3 = CommandGroupEntry.objects.create()
command_group_4 = CommandGroupEntry.objects.create()
feed_1_1 = FeedEntry.objects.create(command_group=command_group_1, git_project=self.git_project1, worker=worker1)
feed_1_2 = FeedEntry.objects.create(command_group=command_group_2, git_project=self.git_project1, worker=worker1)
feed_2_1 = FeedEntry.objects.create(command_group=command_group_3, git_project=git_project2, worker=worker1)
feed_2_2 = FeedEntry.objects.create(command_group=command_group_4, git_project=git_project2, worker=worker1)
self.assertEqual(4, FeedEntry.objects.all().count())
self.assertEqual(1, CommandGroupEntry.objects.filter(id=command_group_1.id).count())
self.assertEqual(1, CommandGroupEntry.objects.filter(id=command_group_2.id).count())
self.assertEqual(1, CommandGroupEntry.objects.filter(id=command_group_3.id).count())
self.assertEqual(1, CommandGroupEntry.objects.filter(id=command_group_4.id).count())
resp = self.client.post(
'/main/feed/report/worker/{0}/purge/all/'.format(worker1.id),
data = '',
content_type='application/json')
res.check_cross_origin_headers(self, resp)
resp_obj = json.loads(resp.content)
self.assertEqual(200, resp_obj['status'])
self.assertEqual(0, FeedEntry.objects.all().count())
self.assertEqual(0, FeedEntry.objects.filter(git_project=self.git_project1, worker=worker1).count())
self.assertEqual(0, FeedEntry.objects.filter(git_project=git_project2, worker=worker1).count())
self.assertEqual(0, CommandGroupEntry.objects.filter(id=command_group_1.id).count())
self.assertEqual(0, CommandGroupEntry.objects.filter(id=command_group_2.id).count())
self.assertEqual(0, CommandGroupEntry.objects.filter(id=command_group_3.id).count())
self.assertEqual(0, CommandGroupEntry.objects.filter(id=command_group_4.id).count())
def test_view_purge_old_feed_reports_from_a_worker(self):
git_project2 = GitProjectEntry.objects.create(url='http://test/2/')
worker1 = WorkerEntry.objects.create(
name='worker-name-1',
uuid='uuid-worker-1',
operative_system='osx',
description='long-description-1',
user=self.user1,
git_feeder=False
)
command_group_1 = CommandGroupEntry.objects.create()
command_group_2 = CommandGroupEntry.objects.create()
command_group_3 = CommandGroupEntry.objects.create()
command_group_4 = CommandGroupEntry.objects.create()
feed_1_1 = FeedEntry.objects.create(command_group=command_group_1, git_project=self.git_project1, worker=worker1)
feed_1_2 = FeedEntry.objects.create(command_group=command_group_2, git_project=self.git_project1, worker=worker1)
feed_2_1 = FeedEntry.objects.create(command_group=command_group_3, git_project=git_project2, worker=worker1)
feed_2_2 = FeedEntry.objects.create(command_group=command_group_4, git_project=git_project2, worker=worker1)
self.assertEqual(4, FeedEntry.objects.all().count())
self.assertEqual(1, CommandGroupEntry.objects.filter(id=command_group_1.id).count())
self.assertEqual(1, CommandGroupEntry.objects.filter(id=command_group_2.id).count())
self.assertEqual(1, CommandGroupEntry.objects.filter(id=command_group_3.id).count())
self.assertEqual(1, CommandGroupEntry.objects.filter(id=command_group_4.id).count())
resp = self.client.post(
'/main/feed/report/worker/{0}/purge/keep/2/'.format(worker1.id),
data = '',
content_type='application/json')
res.check_cross_origin_headers(self, resp)
resp_obj = json.loads(resp.content)
self.assertEqual(200, resp_obj['status'])
self.assertEqual(2, FeedEntry.objects.all().count())
self.assertEqual(0, FeedEntry.objects.filter(git_project=self.git_project1, worker=worker1).count())
self.assertEqual(2, FeedEntry.objects.filter(git_project=git_project2, worker=worker1).count())
self.assertEqual(0, CommandGroupEntry.objects.filter(id=command_group_1.id).count())
self.assertEqual(0, CommandGroupEntry.objects.filter(id=command_group_2.id).count())
self.assertEqual(1, CommandGroupEntry.objects.filter(id=command_group_3.id).count())
self.assertEqual(1, CommandGroupEntry.objects.filter(id=command_group_4.id).count())
def test_view_send_notifications_on_fluctuation_greater_than_45_percent(self):
BenchmarkFluctuationWaiverEntry.objects.create(git_user=self.git_user1, notification_allowed=True);
worker1 = WorkerEntry.objects.create(name='worker-name-1', uuid='uuid-worker-1', operative_system='osx', description='long-description-1', user=self.user1, git_feeder=False)
command_group = CommandGroupEntry.objects.create()
command_set = CommandSetEntry.objects.create(group=command_group)
bluesteel_layout = BluesteelLayoutEntry.objects.create(name='Layout', active=True, project_index_path=0)
bluesteel_project = BluesteelProjectEntry.objects.create(name='Project', order=0, layout=bluesteel_layout, command_group=command_group, git_project=self.git_project1)
benchmark_definition1 = BenchmarkDefinitionEntry.objects.create(name='BenchmarkDefinition1', layout=bluesteel_layout, project=bluesteel_project, command_set=command_set, revision=28)
benchmark_definition1.max_fluctuation_percent = 45
benchmark_definition1.save()
commit0 = GitCommitEntry.objects.create(project=self.git_project1, commit_hash='0000000000000000000000000000000000000000', author=self.git_user1, author_date=timezone.now(), committer=self.git_user1, committer_date=timezone.now())
commit1 = GitCommitEntry.objects.create(project=self.git_project1, commit_hash='0000100001000010000100001000010000100001', author=self.git_user1, author_date=timezone.now(), committer=self.git_user1, committer_date=timezone.now())
branch_test = GitBranchEntry.objects.create(project=self.git_project1, name='branch-test', commit=commit1)
trail0 = GitBranchTrailEntry.objects.create(project=self.git_project1, branch=branch_test, commit=commit0, order=5)
trail1 = GitBranchTrailEntry.objects.create(project=self.git_project1, branch=branch_test, commit=commit1, order=4)
parent0_1 = GitParentEntry.objects.create(project=self.git_project1, parent=commit0, son=commit1, order=0)
report0 = CommandSetEntry.objects.create(group=None)
report1 = CommandSetEntry.objects.create(group=None)
com0 = CommandEntry.objects.create(command_set=report0, command='command0', order=0)
out0 = json.dumps([{'visual_type' : 'vertical_bars', 'id' : 'id1', 'data' : [1.0, 1.0, 1.0, 1.0, 1.0]}])
CommandResultEntry.objects.create(command=com0, out=out0, error='no error', status=0, start_time=timezone.now(), finish_time=timezone.now())
benchmark_execution0 = BenchmarkExecutionEntry.objects.create(definition=benchmark_definition1, commit=commit0, worker=worker1, report=report0, invalidated=False, revision_target=28, status=BenchmarkExecutionEntry.READY)
benchmark_execution1 = BenchmarkExecutionEntry.objects.create(definition=benchmark_definition1, commit=commit1, worker=worker1, report=report1, invalidated=False, revision_target=28, status=BenchmarkExecutionEntry.READY)
self.assertEqual(0, StackedMailEntry.objects.all().count())
com_data = {}
com_data['command'] = 'command-1'
com_data['result'] = {}
com_data['result']['status'] = 0
com_data['result']['out'] = [{'visual_type' : 'vertical_bars', 'id' : 'id1', 'data' : [2.0, 2.0, 2.0, 2.0, 2.0]}]
com_data['result']['error'] = ''
com_data['result']['start_time'] = str(timezone.now())
com_data['result']['finish_time'] = str(timezone.now())
bench_data = {}
bench_data['command_set'] = []
bench_data['command_set'].append(com_data)
resp = self.client.post(
'/main/execution/{0}/save/'.format(benchmark_execution1.id),
data = json.dumps(bench_data),
content_type='application/json')
res.check_cross_origin_headers(self, resp)
resp_obj = json.loads(resp.content)
self.assertEqual(200, resp_obj['status'])
self.assertEqual(1, StackedMailEntry.objects.all().count())
self.assertEqual(1, StackedMailEntry.objects.all().count())
email = StackedMailEntry.objects.all().first()
self.assertTrue('Commit Hash: 0000100001000010000100001000010000100001' in email.content)
self.assertTrue('Commit Author: user1' in email.content)
self.assertTrue('Commit Author Email: user1@test.com' in email.content)
self.assertTrue('Worker Name: worker-name-1' in email.content)
self.assertTrue('Worker Operative System: osx' in email.content)
self.assertTrue('Benchmark Definition Name: BenchmarkDefinition1' in email.content)
self.assertTrue('Parent commit: 0000000' in email.content)
self.assertTrue('Parent median result: 1.0' in email.content)
self.assertTrue('Parent fluctuation vs Current commit: -50.0%' in email.content)
self.assertTrue('Current commit: 0000100' in email.content)
self.assertTrue('Current median result: 2.0' in email.content)
self.assertTrue('http://testserver/main/execution/2/window/' in email.content)
def test_view_not_sending_notifications_because_fluctuation_not_enough(self):
BenchmarkFluctuationWaiverEntry.objects.create(git_user=self.git_user1, notification_allowed=True);
worker1 = WorkerEntry.objects.create(name='worker-name-1', uuid='uuid-worker-1', operative_system='osx', description='long-description-1', user=self.user1, git_feeder=False)
command_group = CommandGroupEntry.objects.create()
command_set = CommandSetEntry.objects.create(group=command_group)
bluesteel_layout = BluesteelLayoutEntry.objects.create(name='Layout', active=True, project_index_path=0)
bluesteel_project = BluesteelProjectEntry.objects.create(name='Project', order=0, layout=bluesteel_layout, command_group=command_group, git_project=self.git_project1)
benchmark_definition1 = BenchmarkDefinitionEntry.objects.create(name='BenchmarkDefinition1', layout=bluesteel_layout, project=bluesteel_project, command_set=command_set, revision=28)
benchmark_definition1.max_fluctuation_percent = 15
benchmark_definition1.save()
commit0 = GitCommitEntry.objects.create(project=self.git_project1, commit_hash='0000000000000000000000000000000000000000', author=self.git_user1, author_date=timezone.now(), committer=self.git_user1, committer_date=timezone.now())
commit1 = GitCommitEntry.objects.create(project=self.git_project1, commit_hash='0000100001000010000100001000010000100001', author=self.git_user1, author_date=timezone.now(), committer=self.git_user1, committer_date=timezone.now())
branch_test = GitBranchEntry.objects.create(project=self.git_project1, name='branch-test', commit=commit1)
trail0 = GitBranchTrailEntry.objects.create(project=self.git_project1, branch=branch_test, commit=commit0, order=5)
trail1 = GitBranchTrailEntry.objects.create(project=self.git_project1, branch=branch_test, commit=commit1, order=4)
parent0_1 = GitParentEntry.objects.create(project=self.git_project1, parent=commit0, son=commit1, order=0)
report0 = CommandSetEntry.objects.create(group=None)
report1 = CommandSetEntry.objects.create(group=None)
com0 = CommandEntry.objects.create(command_set=report0, command='command0', order=0)
out0 = json.dumps([{'visual_type' : 'vertical_bars', 'id' : 'id1', 'data' : [1.0, 1.0, 1.0, 1.0, 1.0]}])
CommandResultEntry.objects.create(command=com0, out=out0, error='no error', status=0, start_time=timezone.now(), finish_time=timezone.now())
benchmark_execution0 = BenchmarkExecutionEntry.objects.create(definition=benchmark_definition1, commit=commit0, worker=worker1, report=report0, invalidated=False, revision_target=28, status=BenchmarkExecutionEntry.READY)
benchmark_execution1 = BenchmarkExecutionEntry.objects.create(definition=benchmark_definition1, commit=commit1, worker=worker1, report=report1, invalidated=False, revision_target=28, status=BenchmarkExecutionEntry.READY)
self.assertEqual(0, StackedMailEntry.objects.all().count())
com_data = {}
com_data['command'] = 'command-1'
com_data['result'] = {}
com_data['result']['status'] = 0
com_data['result']['out'] = [{'visual_type' : 'vertical_bars', 'id' : 'id1', 'data' : [1.1, 1.1, 1.1, 1.1, 1.1]}]
com_data['result']['error'] = ''
com_data['result']['start_time'] = str(timezone.now())
com_data['result']['finish_time'] = str(timezone.now())
bench_data = {}
bench_data['command_set'] = []
bench_data['command_set'].append(com_data)
resp = self.client.post(
'/main/execution/{0}/save/'.format(benchmark_execution1.id),
data = json.dumps(bench_data),
content_type='application/json')
res.check_cross_origin_headers(self, resp)
resp_obj = json.loads(resp.content)
self.assertEqual(200, resp_obj['status'])
self.assertEqual(0, StackedMailEntry.objects.all().count())
def test_view_purge_when_feeding_reports(self):
git_project2 = GitProjectEntry.objects.create(url='http://test/2/')
worker1 = WorkerEntry.objects.create(
name='worker-name-1',
uuid='uuid-worker-1',
operative_system='osx',
description='long-description-1',
user=self.user1,
git_feeder=False,
max_feed_reports=3,
)
command_group_1 = CommandGroupEntry.objects.create()
command_group_2 = CommandGroupEntry.objects.create()
command_group_3 = CommandGroupEntry.objects.create()
command_group_4 = CommandGroupEntry.objects.create()
command_group_5 = CommandGroupEntry.objects.create()
feed_1_1 = FeedEntry.objects.create(command_group=command_group_1, git_project=self.git_project1, worker=worker1)
feed_1_2 = FeedEntry.objects.create(command_group=command_group_2, git_project=self.git_project1, worker=worker1)
feed_2_1 = FeedEntry.objects.create(command_group=command_group_3, git_project=git_project2, worker=worker1)
feed_2_2 = FeedEntry.objects.create(command_group=command_group_4, git_project=git_project2, worker=worker1)
feed_2_2 = FeedEntry.objects.create(command_group=command_group_5, git_project=git_project2, worker=worker1)
self.assertEqual(5, FeedEntry.objects.all().count())
self.assertEqual(1, CommandGroupEntry.objects.filter(id=command_group_1.id).count())
self.assertEqual(1, CommandGroupEntry.objects.filter(id=command_group_2.id).count())
self.assertEqual(1, CommandGroupEntry.objects.filter(id=command_group_3.id).count())
self.assertEqual(1, CommandGroupEntry.objects.filter(id=command_group_4.id).count())
self.assertEqual(1, CommandGroupEntry.objects.filter(id=command_group_5.id).count())
post_data = {}
post_data['reports'] = FeederTestHelper.get_default_report(1)
self.client.login(username='user1@test.com', password='pass1')
resp = self.client.post(
'/main/feed/report/project/{0}/'.format(self.git_project1.id),
data = json.dumps(post_data),
content_type='application/json')
res.check_cross_origin_headers(self, resp)
resp_obj = json.loads(resp.content)
self.assertEqual(200, resp_obj['status'])
self.assertEqual(3, FeedEntry.objects.all().count())
self.assertEqual(1, FeedEntry.objects.filter(git_project=self.git_project1, worker=worker1).count())
self.assertEqual(2, FeedEntry.objects.filter(git_project=git_project2, worker=worker1).count())
self.assertEqual(0, CommandGroupEntry.objects.filter(id=command_group_1.id).count())
self.assertEqual(0, CommandGroupEntry.objects.filter(id=command_group_2.id).count())
self.assertEqual(0, CommandGroupEntry.objects.filter(id=command_group_3.id).count())
self.assertEqual(1, CommandGroupEntry.objects.filter(id=command_group_4.id).count())
self.assertEqual(1, CommandGroupEntry.objects.filter(id=command_group_5.id).count())
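# Payload sketch (added for illustration): every feed test above assembles the same
# structure before POSTing it as JSON to /main/feed/commit/project/<id>/. The wrapper
# added by FeederTestHelper.create_feed_data is not shown in this file, but the inner
# shape built by the tests is:
#   feed_data = {
#       'commits': [commit1, ...],    # dicts returned by FeederTestHelper.create_commit
#       'branches': [branch1, ...],   # dicts returned by FeederTestHelper.create_branch
#   }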
| 52.236709
| 238
| 0.703662
| 4,668
| 41,267
| 6.01928
| 0.06084
| 0.054452
| 0.036302
| 0.024664
| 0.888426
| 0.863585
| 0.828529
| 0.810734
| 0.79995
| 0.79479
| 0
| 0.06448
| 0.170209
| 41,267
| 789
| 239
| 52.302915
| 0.756067
| 0.000654
| 0
| 0.701639
| 0
| 0
| 0.127959
| 0.049791
| 0
| 0
| 0
| 0
| 0.214754
| 1
| 0.027869
| false
| 0.008197
| 0.05082
| 0
| 0.080328
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
47920e8fc6771cad129b92c22b005ea6aa82f616
| 2,114
|
py
|
Python
|
NSIX.py
|
nfs-tech-bd/NSIX
|
c6b928a7f2eb5928d3979dffb4de0321ee0fc856
|
[
"Apache-2.0"
] | 5
|
2021-02-22T17:12:14.000Z
|
2022-01-18T03:55:34.000Z
|
NSIX.py
|
nfs-tech-bd/NSIX
|
c6b928a7f2eb5928d3979dffb4de0321ee0fc856
|
[
"Apache-2.0"
] | null | null | null |
NSIX.py
|
nfs-tech-bd/NSIX
|
c6b928a7f2eb5928d3979dffb4de0321ee0fc856
|
[
"Apache-2.0"
] | 1
|
2021-05-11T23:38:24.000Z
|
2021-05-11T23:38:24.000Z
|
import marshal,zlib,base64
exec(marshal.loads(zlib.decompress(base64.b64decode("eJytVm1T20YQXvkNLCAx5IU2L+2lX+rMFBtI2rw0k8YYE1yCzcj20CjpMEI62zJ6cXTymGTIl6b/oB867e/ol/6j/ot29ySBKWTaZnpGx+7t3t7dPrt7Z0Lc0vg9wU/cws7CPwUcAB2AAwwUsFLwTqHBTAe7VpHU7T+xNUR2FsBMzGQTMz8rAK//OGUpphXQlYROgZ5K6DTo6YTOgJ5J6Czo2YTOgZ5L6CnQpxJ6GvTphM6Dnk9oFXQ1oWdAn0noWdBnE3oO9LmEvgD6BUmnwLkIbgH0AijEp8GZB3cB9IWIz5AN9xLol0DZ9e5Dhl+GAxWCzxRs5LIrYGXJZQmTi5gUDK6CNSUZT4Hv+CJwJRIpMPgIrOnJSXnJWCr8iA78GCw8wjWwcPPXwcJt3wDrghTdBOuiJD4BqyCJT4EzsObhIAXBTwq/Ce8AF5SHbBUXjsErIkoQqti1+wE3rB3fd6Kxi9hVfc/jZmj7Xi0I/CASTGG3FvhjwQNxBemhPVxltidCw3FYwF+NuAiFuPp3icvNvuHZb7gtrWSwG4Xd+yEFjOlwIzBpPBVHYpVCiLrDR3Akd724/v0yvFUglOE4SMFR5LaYT8sDotlBNo5V5dUM7CJUUaw2BJlXe7/+Qu33b4o52gVJxGsREiNCyx+Fcj/jwA65pLrOSPTlZkPbjYaEw/mwSBsNyeQb2fMiBb/sBImsntQdGI7hFWhwmkTKvFJQ5hTxLaqo6vUXK18/WHXzeYZtj76oRf+JjVTuRCpldoRfmaTlMkqRR3bSCo7S0Euc/FKqHtG0CSM4hkNlssDKZamCU05MlMkoWqf1pRYOHO0dxVtdcdlJa25s1Kv1yjNWba7XNLa2cre6tbTVWHm6eY8VG5WNeottdCrrt9Vk+WRipdNublfa9SqrVKvNTqPNqlqlukU2njOp+5V7Mj+efs9d+s8tnvqlq7LK+na9ES3/kE0YZxuVam2t2dySgoQpVZvbZalVIq3Sg+W7Knve7LQ7a7XYxEaLtWvVTbaGNp7W25udtdh4xEQWNlpLpLSESrSTO6uuqn7IMdg/NDGDoeV1xVKIKba0b8lw9YR9KItyklUyow4paqN6HmdPCp5uYhpEKfMWKNlwPlWNu4AhLKvGk3Pm4ARUXcS6uIgGcjCYopp4kpBx8QunpQmZhpRkDY02J6gAsVqjjah3WjWtUdmuofNkkrDiOPC93m1ZJsQylQCrt+QPucf6YTgUD8vl8Xhc6hom3/f9g5Lpu2XP6Nqi1B0ZFoFVzNMp6cx+nNmY4dyVbnH8nh+SPDDGe7Y3xISnYXMkApnHQ42Emppk9yiqeVQnUOVMkmtUAFeIvyp9PadklVn8qSmi5uTvLAo/nIuClUCRPgPFg2Moau+biLMkHpkJPLKTeOTO4DF1gsfn5+IxEVOsaPpBgHfB7Unsdiqt1m5TW/+fsdPo7tGobGrz1C0kF4Y5HFsaRfsJStplguQYJFQ4CxKpPCb+2jFIkzCdAgomgaq/Hyj0N3aZGLLY09Htk5t0e3TTS39Pf5i/2bn+pvyeULpJSnEdf+b3bI+1RqbJheiOHOf6i2VXPEKNuBTv4HUrONs17JCtsm3bG4VcfMEqeEvvGOaB0eOCVQLOqn3kbK9XohZhSlAMX4d931tlJa2GhbVWci2Z2n9Hbe4UTieonIKGwGwQX5iAhn4ETUge29tzDdvb2yveOl4jm1iWCpaBdzVdzjmZ1p7lu/KN0jdE37H3ZSEIuEz6UL5x8EQymgbC9+SkUeCQImmYGJY2J45M9Hg4NISQkuP3i1w0eedIZr/vhyE+nEIKNHfkhPYw8Mn55LshCjQ6XHhpYl6JH5p8SC8roZFjNfmeIG/U3aEfhPK9FTlT1iJ5Nu74hhUSBIKHFu8auBT3TJ9OFLlbOp7W0Rapu5IkiPRz7E7PcDm681w8pMoj17dGDn9MQ4KkaeU3fLcUlMtxoswrKv1P52/k0jklp+Tn8oW8OqtEjyJynGsEom848g1Emxb/YrlsMj6r/AXeJ2O+"))))
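# Note (added): the single line above is a common packing pattern -- the payload is a
# base64 -> zlib -> marshal-serialised code object that is executed blindly via exec().
# A cautious way to look inside such a blob without running it (a sketch; it assumes the
# same CPython version that produced the blob, since the marshal format is
# version-specific) is to stop after marshal.loads and disassemble instead of exec:
#
#   import base64, dis, marshal, zlib
#   code_obj = marshal.loads(zlib.decompress(base64.b64decode(blob)))  # blob = the literal above
#   dis.dis(code_obj)   # prints bytecode rather than executing it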
| 704.666667
| 2,086
| 0.962157
| 70
| 2,114
| 29.057143
| 0.957143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151113
| 0.001419
| 2,114
| 2
| 2,087
| 1,057
| 0.812411
| 0
| 0
| 0
| 0
| 0.5
| 0.959319
| 0.959319
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 10
|
9a0f2bd3f561f3ba7ec6136ee95be224a7910eb0
| 109
|
py
|
Python
|
deli_counter/test/test_authn_db.py
|
sandwichcloud/deli-counter
|
e72ee3966f4457ffbd1d432b3516965075c7c86e
|
[
"MIT"
] | 1
|
2017-10-19T09:25:29.000Z
|
2017-10-19T09:25:29.000Z
|
deli_counter/test/test_authn_db.py
|
sandwichcloud/deli-counter
|
e72ee3966f4457ffbd1d432b3516965075c7c86e
|
[
"MIT"
] | null | null | null |
deli_counter/test/test_authn_db.py
|
sandwichcloud/deli-counter
|
e72ee3966f4457ffbd1d432b3516965075c7c86e
|
[
"MIT"
] | null | null | null |
from deli_counter.test.base import DeliTestCase


class TestAuthNDB(DeliTestCase):
    pass

# TODO: do this
| 13.625
| 47
| 0.770642
| 14
| 109
| 5.928571
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.165138
| 109
| 7
| 48
| 15.571429
| 0.912088
| 0.119266
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
9a59a943e214254dcc428d4ef6da0e8bf9f013ce
| 4,856
|
py
|
Python
|
sympycore/heads/relational.py
|
radovankavicky/pymaclab
|
21da758f64ed0b62969c9289576f677e977cfd98
|
[
"Apache-2.0"
] | 96
|
2015-01-25T05:59:56.000Z
|
2021-12-29T14:05:22.000Z
|
sympycore/heads/relational.py
|
1zinnur9/pymaclab
|
21da758f64ed0b62969c9289576f677e977cfd98
|
[
"Apache-2.0"
] | 3
|
2015-12-17T19:25:46.000Z
|
2018-06-19T07:05:20.000Z
|
sympycore/heads/relational.py
|
1zinnur9/pymaclab
|
21da758f64ed0b62969c9289576f677e977cfd98
|
[
"Apache-2.0"
] | 36
|
2016-01-31T15:22:01.000Z
|
2021-03-29T07:03:07.000Z
|
from __future__ import with_statement
__all__ = ['EQ', 'NE', 'LT', 'LE', 'GT', 'GE']
from ..core import init_module, classes, SymbolicEquality
init_module.import_heads()
init_module.import_numbers()
from .base import Head, BinaryHead, heads_precedence, Expr
class RelationalHead(BinaryHead):
def walk(self, func, cls, data, target):
lhs, rhs = data
lhs1 = lhs.head.walk(func, classes.Calculus, lhs.data, lhs)
rhs1 = rhs.head.walk(func, classes.Calculus, rhs.data, rhs)
if lhs1 is lhs and rhs1 is rhs:
return func(cls, data, target)
r = self.new(cls, (lhs1, rhs1))
return func(cls, r.head, r.data, r)
class EqHead(RelationalHead):
op_symbol = '=='
op_mth = '__eq__'
def new(self, cls, (lhs, rhs)):
if lhs.head is NUMBER and rhs.head is NUMBER:
return cls(lhs.data == rhs.data)
return cls(self, (lhs, rhs))
def reevaluate(self, cls, (lhs, rhs)):
with SymbolicEquality(type(lhs), type(rhs)):
return lhs == rhs
def __repr__(self): return 'EQ'
def nonzero(self, cls, (lhs, rhs)):
if isinstance(lhs, Expr):
lhs = lhs.as_lowlevel()
if isinstance(rhs, Expr):
rhs = rhs.as_lowlevel()
if isinstance(lhs, numbertypes) and isinstance(rhs, numbertypes):
return lhs == rhs
return True
class NeHead(RelationalHead):
op_symbol = '!='
op_mth = '__ne__'
def new(self, cls, (lhs, rhs)):
if lhs.head is NUMBER and rhs.head is NUMBER:
return cls(lhs.data != rhs.data)
return cls(self, (lhs, rhs))
def reevaluate(self, cls, (lhs, rhs)):
with SymbolicEquality(type(lhs), type(rhs)):
return lhs != rhs
def __repr__(self): return 'NE'
def nonzero(self, cls, (lhs, rhs)):
if isinstance(lhs, Expr):
lhs = lhs.as_lowlevel()
if isinstance(rhs, Expr):
rhs = rhs.as_lowlevel()
if isinstance(lhs, numbertypes) and isinstance(rhs, numbertypes):
return lhs != rhs
return True
class LtHead(RelationalHead):
op_symbol = '<'
op_mth = '__lt__'
def __repr__(self): return 'LT'
def new(self, cls, (lhs, rhs), evaluate=True):
if lhs.head is NUMBER and rhs.head is NUMBER:
return cls(lhs.data < rhs.data)
return cls(self, (lhs, rhs))
def reevaluate(self, cls, (lhs, rhs)):
return lhs < rhs
def nonzero(self, cls, (lhs, rhs)):
if isinstance(lhs, Expr):
lhs = lhs.as_lowlevel()
if isinstance(rhs, Expr):
rhs = rhs.as_lowlevel()
if isinstance(lhs, numbertypes) and isinstance(rhs, numbertypes):
return lhs < rhs
return True
class LeHead(RelationalHead):
op_symbol = '<='
op_mth = '__le__'
def new(self, cls, (lhs, rhs)):
if lhs.head is NUMBER and rhs.head is NUMBER:
return cls(lhs.data <= rhs.data)
return cls(self, (lhs, rhs))
def reevaluate(self, cls, (lhs, rhs)):
return lhs <= rhs
def __repr__(self): return 'LE'
def nonzero(self, cls, (lhs, rhs)):
if isinstance(lhs, Expr):
lhs = lhs.as_lowlevel()
if isinstance(rhs, Expr):
rhs = rhs.as_lowlevel()
if isinstance(lhs, numbertypes) and isinstance(rhs, numbertypes):
return lhs <= rhs
return True
class GtHead(RelationalHead):
op_symbol = '>'
op_mth = '__gt__'
def new(self, cls, (lhs, rhs)):
if lhs.head is NUMBER and rhs.head is NUMBER:
return cls(lhs.data > rhs.data)
return cls(self, (lhs, rhs))
def reevaluate(self, cls, (lhs, rhs)):
return lhs > rhs
def __repr__(self): return 'GT'
def nonzero(self, cls, (lhs, rhs)):
if isinstance(lhs, Expr):
lhs = lhs.as_lowlevel()
if isinstance(rhs, Expr):
rhs = rhs.as_lowlevel()
if isinstance(lhs, numbertypes) and isinstance(rhs, numbertypes):
return lhs > rhs
return True
class GeHead(RelationalHead):
op_symbol = '>='
op_mth = '__ge__'
def new(self, cls, (lhs, rhs)):
if lhs.head is NUMBER and rhs.head is NUMBER:
return cls(lhs.data >= rhs.data)
return cls(self, (lhs, rhs))
def reevaluate(self, cls, (lhs, rhs)):
return lhs >= rhs
def __repr__(self): return 'GE'
def nonzero(self, cls, (lhs, rhs)):
if isinstance(lhs, Expr):
lhs = lhs.as_lowlevel()
if isinstance(rhs, Expr):
rhs = rhs.as_lowlevel()
if isinstance(lhs, numbertypes) and isinstance(rhs, numbertypes):
return lhs >= rhs
return True
EQ = EqHead()
NE = NeHead()
LT = LtHead()
LE = LeHead()
GT = GtHead()
GE = GeHead()
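# Note (added): the ``def new(self, cls, (lhs, rhs))`` signatures above rely on Python 2
# tuple-parameter unpacking, which was removed in Python 3 (PEP 3113), so this module only
# imports on Python 2. A hypothetical Python 3 port of one such method would unpack
# explicitly:
#
#   def new(self, cls, pair):
#       lhs, rhs = pair
#       if lhs.head is NUMBER and rhs.head is NUMBER:
#           return cls(lhs.data == rhs.data)
#       return cls(self, (lhs, rhs))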
| 27.748571
| 73
| 0.578048
| 625
| 4,856
| 4.352
| 0.1024
| 0.081618
| 0.066176
| 0.086029
| 0.811029
| 0.731618
| 0.724632
| 0.724632
| 0.724632
| 0.724632
| 0
| 0.001758
| 0.297158
| 4,856
| 174
| 74
| 27.908046
| 0.795195
| 0
| 0
| 0.603053
| 0
| 0
| 0.014418
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.038168
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7bf92a69b195e83bcb5fbfb7a69866db2e08d06a
| 2,084
|
py
|
Python
|
src/sort/bubble_sort.py
|
grayondream/algorithm-forth
|
c2c21419ec7c42b10f229ab41ac10edf7afbe956
|
[
"Apache-2.0"
] | null | null | null |
src/sort/bubble_sort.py
|
grayondream/algorithm-forth
|
c2c21419ec7c42b10f229ab41ac10edf7afbe956
|
[
"Apache-2.0"
] | null | null | null |
src/sort/bubble_sort.py
|
grayondream/algorithm-forth
|
c2c21419ec7c42b10f229ab41ac10edf7afbe956
|
[
"Apache-2.0"
] | null | null | null |
def bubble_sort(l, start, end, hook_func=None):
'''
@brief bubble sort algorithm, sorting the closed interval [start, end]
@param l the list to be sorted
@param start start position
@param end end position
@param hook_func hook function, used here mainly for visualisation
'''
count = 0
for i in range(start, end + 1):
for j in range(start, end - i):
if hook_func is not None:
hook_func(l, i, j, count)
count += 1
if l[j] > l[j + 1]:
l[j + 1], l[j] = l[j], l[j + 1]
return l
def bubble_sort_II(l, start, end, hook_func=None):
'''
@brief bubble sort algorithm, sorting the closed interval [start, end]
@param l the list to be sorted
@param start start position
@param end end position
@param hook_func hook function, used here mainly for visualisation
@note a sorted flag records whether the remaining positions are already ordered; following the bubble-sort idea, if a pass swaps no elements the tail of the list is guaranteed to be sorted already
'''
count = 0
sorted = False
for i in range(start, end + 1):
sorted = True
for j in range(start, end - i):
if hook_func is not None:
hook_func(l, i, j, count)
count += 1
if l[j] > l[j + 1]:
l[j + 1], l[j] = l[j], l[j + 1]
sorted = False
if sorted:
break
return l
def bubble_sort_III(l, start, end, hook_func=None):
'''
@brief bubble sort algorithm, sorting the closed interval [start, end]
@param l the list to be sorted
@param start start position
@param end end position
@param hook_func hook function, used here mainly for visualisation
@note a sorted flag records whether the remaining positions are already ordered; following the bubble-sort idea, if a pass swaps no elements the tail of the list is guaranteed to be sorted already
'''
count = 0
sorted = False
sorted_border = end
last_swap_index = 0
for i in range(start, end + 1):
sorted = True
for j in range(start, sorted_border):
if hook_func is not None:
hook_func(l, i, j, count)
count += 1
if l[j] > l[j + 1]:
l[j + 1], l[j] = l[j], l[j + 1]
sorted = False
last_swap_index = j
sorted_border = last_swap_index
if sorted:
break
return l
all = [bubble_sort, bubble_sort_II, bubble_sort_III]
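# Usage sketch (added for illustration): each variant sorts the closed interval
# [start, end] in place and returns the list; the optional hook_func is invoked as
# hook_func(l, i, j, count) before a comparison when provided, which the original
# author uses for visualisation.
if __name__ == '__main__':
    data = [5, 2, 9, 1, 7]
    print(bubble_sort(list(data), 0, len(data) - 1))      # [1, 2, 5, 7, 9]
    print(bubble_sort_III(list(data), 0, len(data) - 1))  # [1, 2, 5, 7, 9]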
| 25.414634
| 73
| 0.521113
| 278
| 2,084
| 3.794964
| 0.158273
| 0.034123
| 0.025592
| 0.034123
| 0.867299
| 0.798104
| 0.798104
| 0.798104
| 0.777251
| 0.777251
| 0
| 0.014717
| 0.380518
| 2,084
| 81
| 74
| 25.728395
| 0.802479
| 0.255278
| 0
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.066667
| false
| 0
| 0
| 0
| 0.133333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d000bc1502e589f3a9f5354316d4814e3c38a030
| 1,323
|
py
|
Python
|
test/test_pythoscope_localizable.py
|
h4ck3rm1k3/pythoscope
|
0cefb34b86e2e81e29c0b93d27e3d4657db79912
|
[
"MIT"
] | null | null | null |
test/test_pythoscope_localizable.py
|
h4ck3rm1k3/pythoscope
|
0cefb34b86e2e81e29c0b93d27e3d4657db79912
|
[
"MIT"
] | null | null | null |
test/test_pythoscope_localizable.py
|
h4ck3rm1k3/pythoscope
|
0cefb34b86e2e81e29c0b93d27e3d4657db79912
|
[
"MIT"
] | null | null | null |
import unittest


class TestLocalizable(unittest.TestCase):
    def test___init__(self):
        # localizable = Localizable(project, subpath, created)
        assert False # TODO: implement your test here

    def test_exists(self):
        # localizable = Localizable(project, subpath, created)
        # self.assertEqual(expected, localizable.exists())
        assert False # TODO: implement your test here

    def test_get_path(self):
        # localizable = Localizable(project, subpath, created)
        # self.assertEqual(expected, localizable.get_path())
        assert False # TODO: implement your test here

    def test_is_out_of_sync(self):
        # localizable = Localizable(project, subpath, created)
        # self.assertEqual(expected, localizable.is_out_of_sync())
        assert False # TODO: implement your test here

    def test_is_up_to_date(self):
        # localizable = Localizable(project, subpath, created)
        # self.assertEqual(expected, localizable.is_up_to_date())
        assert False # TODO: implement your test here

    def test_write(self):
        # localizable = Localizable(project, subpath, created)
        # self.assertEqual(expected, localizable.write(new_content))
        assert False # TODO: implement your test here

if __name__ == '__main__':
    unittest.main()
| 36.75
| 68
| 0.685563
| 148
| 1,323
| 5.905405
| 0.25
| 0.048055
| 0.17849
| 0.226545
| 0.813501
| 0.813501
| 0.759725
| 0.718535
| 0.718535
| 0.570938
| 0
| 0
| 0.225246
| 1,323
| 35
| 69
| 37.8
| 0.852683
| 0.58579
| 0
| 0.375
| 0
| 0
| 0.015094
| 0
| 0
| 0
| 0
| 0.028571
| 0.375
| 1
| 0.375
| false
| 0
| 0.0625
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d02f987c5347d2247e2d113df7b92d23191487f8
| 163,460
|
py
|
Python
|
octopus_deploy_swagger_client/octopus_deploy_client/releases_api.py
|
cvent/octopus-deploy-api-client
|
0e03e842e1beb29b132776aee077df570b88366a
|
[
"Apache-2.0"
] | null | null | null |
octopus_deploy_swagger_client/octopus_deploy_client/releases_api.py
|
cvent/octopus-deploy-api-client
|
0e03e842e1beb29b132776aee077df570b88366a
|
[
"Apache-2.0"
] | null | null | null |
octopus_deploy_swagger_client/octopus_deploy_client/releases_api.py
|
cvent/octopus-deploy-api-client
|
0e03e842e1beb29b132776aee077df570b88366a
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Octopus Server API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 2019.6.7+Branch.tags-2019.6.7.Sha.aa18dc6809953218c66f57eff7d26481d9b23d6a
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from octopus_deploy_swagger_client.api_client import ApiClient
class ReleasesApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def child_index_response_descriptor_projects_project_projects_release_project_resource_release_resource(self, **kwargs): # noqa: E501
"""Get a list of ReleaseResources # noqa: E501
Lists all of the releases that belong to the given project. Releases will be ordered from most recent to least recent. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.child_index_response_descriptor_projects_project_projects_release_project_resource_release_resource(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int skip: Number of items to skip
:param int take: Number of items to take
:return: ResourceCollectionProjectResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.child_index_response_descriptor_projects_project_projects_release_project_resource_release_resource_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.child_index_response_descriptor_projects_project_projects_release_project_resource_release_resource_with_http_info(**kwargs) # noqa: E501
return data
def child_index_response_descriptor_projects_project_projects_release_project_resource_release_resource_with_http_info(self, **kwargs): # noqa: E501
"""Get a list of ReleaseResources # noqa: E501
Lists all of the releases that belong to the given project. Releases will be ordered from most recent to least recent. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.child_index_response_descriptor_projects_project_projects_release_project_resource_release_resource_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int skip: Number of items to skip
:param int take: Number of items to take
:return: ResourceCollectionProjectResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['skip', 'take'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method child_index_response_descriptor_projects_project_projects_release_project_resource_release_resource" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'skip' in params:
query_params.append(('skip', params['skip'])) # noqa: E501
if 'take' in params:
query_params.append(('take', params['take'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/projects/{id}/releases', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResourceCollectionProjectResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
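# Usage sketch (illustrative only, not part of the generated client): listing the
# releases of a project via this operation, assuming `api` is an instance of this
# API class as in the docstring examples above. Note that the route contains an
# `{id}` placeholder but this generated method exposes no `id` argument, so the
# placeholder is left unsubstituted; `skip` and `take` are the only query
# parameters it accepts.
#
#     page = api.child_index_response_descriptor_projects_project_projects_release_project_resource_release_resource(skip=0, take=30)
#     # `page` is deserialized as ResourceCollectionProjectResource, per response_type above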
def child_index_response_descriptor_projects_project_projects_release_project_resource_release_resource_spaces(self, base_space_id, **kwargs): # noqa: E501
"""Get a list of ReleaseResources # noqa: E501
Lists all of the releases that belong to the given project. Releases will be ordered from most recent to least recent. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.child_index_response_descriptor_projects_project_projects_release_project_resource_release_resource_spaces(base_space_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param int skip: Number of items to skip
:param int take: Number of items to take
:return: ResourceCollectionProjectResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.child_index_response_descriptor_projects_project_projects_release_project_resource_release_resource_spaces_with_http_info(base_space_id, **kwargs) # noqa: E501
else:
(data) = self.child_index_response_descriptor_projects_project_projects_release_project_resource_release_resource_spaces_with_http_info(base_space_id, **kwargs) # noqa: E501
return data
def child_index_response_descriptor_projects_project_projects_release_project_resource_release_resource_spaces_with_http_info(self, base_space_id, **kwargs): # noqa: E501
"""Get a list of ReleaseResources # noqa: E501
Lists all of the releases that belong to the given project. Releases will be ordered from most recent to least recent. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.child_index_response_descriptor_projects_project_projects_release_project_resource_release_resource_spaces_with_http_info(base_space_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param int skip: Number of items to skip
:param int take: Number of items to take
:return: ResourceCollectionProjectResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['base_space_id', 'skip', 'take'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method child_index_response_descriptor_projects_project_projects_release_project_resource_release_resource_spaces" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'base_space_id' is set
if ('base_space_id' not in params or
params['base_space_id'] is None):
raise ValueError("Missing the required parameter `base_space_id` when calling `child_index_response_descriptor_projects_project_projects_release_project_resource_release_resource_spaces`") # noqa: E501
collection_formats = {}
path_params = {}
if 'base_space_id' in params:
path_params['baseSpaceId'] = params['base_space_id'] # noqa: E501
query_params = []
if 'skip' in params:
query_params.append(('skip', params['skip'])) # noqa: E501
if 'take' in params:
query_params.append(('take', params['take'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/{baseSpaceId}/projects/{id}/releases', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResourceCollectionProjectResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
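# Usage sketch (illustrative only): the spaces-aware variant is identical except
# that the required base_space_id is substituted into the {baseSpaceId} segment of
# the route. "Spaces-1" below is an assumed example ID, not a real value.
#
#     page = api.child_index_response_descriptor_projects_project_projects_release_project_resource_release_resource_spaces("Spaces-1", take=30)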
def child_index_response_descriptor_projects_release_projects_deployment_release_resource_deployment_resource(self, **kwargs): # noqa: E501
"""Get a list of DeploymentResources # noqa: E501
Lists all of the deployments that belong to the given release. Deployments will be ordered from most recent to least recent. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.child_index_response_descriptor_projects_release_projects_deployment_release_resource_deployment_resource(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int skip: Number of items to skip
:param int take: Number of items to take
:return: ResourceCollectionReleaseResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.child_index_response_descriptor_projects_release_projects_deployment_release_resource_deployment_resource_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.child_index_response_descriptor_projects_release_projects_deployment_release_resource_deployment_resource_with_http_info(**kwargs) # noqa: E501
return data
def child_index_response_descriptor_projects_release_projects_deployment_release_resource_deployment_resource_with_http_info(self, **kwargs): # noqa: E501
"""Get a list of DeploymentResources # noqa: E501
Lists all of the deployments that belong to the given release. Deployments will be ordered from most recent to least recent. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.child_index_response_descriptor_projects_release_projects_deployment_release_resource_deployment_resource_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int skip: Number of items to skip
:param int take: Number of items to take
:return: ResourceCollectionReleaseResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['skip', 'take'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method child_index_response_descriptor_projects_release_projects_deployment_release_resource_deployment_resource" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'skip' in params:
query_params.append(('skip', params['skip'])) # noqa: E501
if 'take' in params:
query_params.append(('take', params['take'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/releases/{id}/deployments', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResourceCollectionReleaseResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
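# Usage sketch (illustrative only): the asynchronous form mirrors the docstring
# example; passing async_req=True makes the call return a thread-like object whose
# .get() yields the deserialized ResourceCollectionReleaseResource.
#
#     thread = api.child_index_response_descriptor_projects_release_projects_deployment_release_resource_deployment_resource(async_req=True)
#     deployments = thread.get()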
def child_index_response_descriptor_projects_release_projects_deployment_release_resource_deployment_resource_spaces(self, base_space_id, **kwargs): # noqa: E501
"""Get a list of DeploymentResources # noqa: E501
Lists all of the deployments that belong to the given release. Deployments will be ordered from most recent to least recent. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.child_index_response_descriptor_projects_release_projects_deployment_release_resource_deployment_resource_spaces(base_space_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param int skip: Number of items to skip
:param int take: Number of items to take
:return: ResourceCollectionReleaseResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.child_index_response_descriptor_projects_release_projects_deployment_release_resource_deployment_resource_spaces_with_http_info(base_space_id, **kwargs) # noqa: E501
else:
(data) = self.child_index_response_descriptor_projects_release_projects_deployment_release_resource_deployment_resource_spaces_with_http_info(base_space_id, **kwargs) # noqa: E501
return data
def child_index_response_descriptor_projects_release_projects_deployment_release_resource_deployment_resource_spaces_with_http_info(self, base_space_id, **kwargs): # noqa: E501
"""Get a list of DeploymentResources # noqa: E501
Lists all of the deployments that belong to the given release. Deployments will be ordered from most recent to least recent. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.child_index_response_descriptor_projects_release_projects_deployment_release_resource_deployment_resource_spaces_with_http_info(base_space_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param int skip: Number of items to skip
:param int take: Number of items to take
:return: ResourceCollectionReleaseResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['base_space_id', 'skip', 'take'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method child_index_response_descriptor_projects_release_projects_deployment_release_resource_deployment_resource_spaces" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'base_space_id' is set
if ('base_space_id' not in params or
params['base_space_id'] is None):
raise ValueError("Missing the required parameter `base_space_id` when calling `child_index_response_descriptor_projects_release_projects_deployment_release_resource_deployment_resource_spaces`") # noqa: E501
collection_formats = {}
path_params = {}
if 'base_space_id' in params:
path_params['baseSpaceId'] = params['base_space_id'] # noqa: E501
query_params = []
if 'skip' in params:
query_params.append(('skip', params['skip'])) # noqa: E501
if 'take' in params:
query_params.append(('take', params['take'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/{baseSpaceId}/releases/{id}/deployments', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResourceCollectionReleaseResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_response_descriptor_projects_release_release_resource(self, **kwargs): # noqa: E501
"""Create a ReleaseResource # noqa: E501
Creates a new release. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_response_descriptor_projects_release_release_resource(async_req=True)
>>> result = thread.get()
:param async_req bool
:param ReleaseResource release_resource: The ReleaseResource to create
:return: ReleaseResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_response_descriptor_projects_release_release_resource_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.create_response_descriptor_projects_release_release_resource_with_http_info(**kwargs) # noqa: E501
return data
def create_response_descriptor_projects_release_release_resource_with_http_info(self, **kwargs): # noqa: E501
"""Create a ReleaseResource # noqa: E501
Creates a new release. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_response_descriptor_projects_release_release_resource_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param ReleaseResource release_resource: The ReleaseResource to create
:return: ReleaseResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['release_resource'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_response_descriptor_projects_release_release_resource" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'release_resource' in params:
body_params = params['release_resource']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/releases', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ReleaseResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
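# Usage sketch (illustrative only): creating a release posts the ReleaseResource
# model as the request body. The attribute names project_id and version below are
# assumptions about the generated ReleaseResource model, not guarantees; consult
# the model class for the exact constructor arguments.
#
#     release = ReleaseResource(project_id="Projects-1", version="1.0.1")
#     created = api.create_response_descriptor_projects_release_release_resource(release_resource=release)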
def create_response_descriptor_projects_release_release_resource_spaces(self, base_space_id, **kwargs): # noqa: E501
"""Create a ReleaseResource # noqa: E501
Creates a new release. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_response_descriptor_projects_release_release_resource_spaces(base_space_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param ReleaseResource release_resource: The ReleaseResource to create
:return: ReleaseResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_response_descriptor_projects_release_release_resource_spaces_with_http_info(base_space_id, **kwargs) # noqa: E501
else:
(data) = self.create_response_descriptor_projects_release_release_resource_spaces_with_http_info(base_space_id, **kwargs) # noqa: E501
return data
def create_response_descriptor_projects_release_release_resource_spaces_with_http_info(self, base_space_id, **kwargs): # noqa: E501
"""Create a ReleaseResource # noqa: E501
Creates a new release. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_response_descriptor_projects_release_release_resource_spaces_with_http_info(base_space_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param ReleaseResource release_resource: The ReleaseResource to create
:return: ReleaseResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['base_space_id', 'release_resource'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_response_descriptor_projects_release_release_resource_spaces" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'base_space_id' is set
if ('base_space_id' not in params or
params['base_space_id'] is None):
raise ValueError("Missing the required parameter `base_space_id` when calling `create_response_descriptor_projects_release_release_resource_spaces`") # noqa: E501
collection_formats = {}
path_params = {}
if 'base_space_id' in params:
path_params['baseSpaceId'] = params['base_space_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'release_resource' in params:
body_params = params['release_resource']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/{baseSpaceId}/releases', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ReleaseResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def custom_action_response_descriptor_octopus_server_web_api_actions_defect_reported_responder(self, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_defect_reported_responder # noqa: E501
Records a defect in a release. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_defect_reported_responder(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: ID of the resource (required)
:return: DefectResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.custom_action_response_descriptor_octopus_server_web_api_actions_defect_reported_responder_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.custom_action_response_descriptor_octopus_server_web_api_actions_defect_reported_responder_with_http_info(id, **kwargs) # noqa: E501
return data
def custom_action_response_descriptor_octopus_server_web_api_actions_defect_reported_responder_with_http_info(self, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_defect_reported_responder # noqa: E501
Records a defect in a release. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_defect_reported_responder_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: ID of the resource (required)
:return: DefectResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method custom_action_response_descriptor_octopus_server_web_api_actions_defect_reported_responder" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_defect_reported_responder`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/releases/{id}/defects', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DefectResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
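# Usage sketch (illustrative only): reporting a defect is a POST keyed only by the
# release id that this generated method exposes; no request body parameter is
# exposed here. "Releases-1" is an assumed example ID.
#
#     defect = api.custom_action_response_descriptor_octopus_server_web_api_actions_defect_reported_responder("Releases-1")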
def custom_action_response_descriptor_octopus_server_web_api_actions_defect_reported_responder_spaces(self, base_space_id, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_defect_reported_responder_spaces # noqa: E501
Records a defect in a release. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_defect_reported_responder_spaces(base_space_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str id: ID of the resource (required)
:return: DefectResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.custom_action_response_descriptor_octopus_server_web_api_actions_defect_reported_responder_spaces_with_http_info(base_space_id, id, **kwargs) # noqa: E501
else:
(data) = self.custom_action_response_descriptor_octopus_server_web_api_actions_defect_reported_responder_spaces_with_http_info(base_space_id, id, **kwargs) # noqa: E501
return data
def custom_action_response_descriptor_octopus_server_web_api_actions_defect_reported_responder_spaces_with_http_info(self, base_space_id, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_defect_reported_responder_spaces # noqa: E501
Records a defect in a release. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_defect_reported_responder_spaces_with_http_info(base_space_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str id: ID of the resource (required)
:return: DefectResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['base_space_id', 'id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method custom_action_response_descriptor_octopus_server_web_api_actions_defect_reported_responder_spaces" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'base_space_id' is set
if ('base_space_id' not in params or
params['base_space_id'] is None):
raise ValueError("Missing the required parameter `base_space_id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_defect_reported_responder_spaces`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_defect_reported_responder_spaces`") # noqa: E501
collection_formats = {}
path_params = {}
if 'base_space_id' in params:
path_params['baseSpaceId'] = params['base_space_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/{baseSpaceId}/releases/{id}/defects', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DefectResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def custom_action_response_descriptor_octopus_server_web_api_actions_defect_resolved_responder(self, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_defect_resolved_responder # noqa: E501
Updates or resolves a defect in a release. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_defect_resolved_responder(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: ID of the resource (required)
:return: DefectResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.custom_action_response_descriptor_octopus_server_web_api_actions_defect_resolved_responder_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.custom_action_response_descriptor_octopus_server_web_api_actions_defect_resolved_responder_with_http_info(id, **kwargs) # noqa: E501
return data
def custom_action_response_descriptor_octopus_server_web_api_actions_defect_resolved_responder_with_http_info(self, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_defect_resolved_responder # noqa: E501
Updates or resolves a defect in a release. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_defect_resolved_responder_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: ID of the resource (required)
:return: DefectResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method custom_action_response_descriptor_octopus_server_web_api_actions_defect_resolved_responder" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_defect_resolved_responder`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/releases/{id}/defects/resolve', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DefectResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
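# Usage sketch (illustrative only): resolving a defect works the same way; the
# common keyword arguments forwarded to call_api (e.g. _request_timeout in
# seconds, _preload_content) can be passed through any of these generated methods.
# "Releases-1" is an assumed example ID.
#
#     resolved = api.custom_action_response_descriptor_octopus_server_web_api_actions_defect_resolved_responder("Releases-1", _request_timeout=30)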
def custom_action_response_descriptor_octopus_server_web_api_actions_defect_resolved_responder_spaces(self, base_space_id, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_defect_resolved_responder_spaces # noqa: E501
Updates or resolves a defect in a release. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_defect_resolved_responder_spaces(base_space_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str id: ID of the resource (required)
:return: DefectResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.custom_action_response_descriptor_octopus_server_web_api_actions_defect_resolved_responder_spaces_with_http_info(base_space_id, id, **kwargs) # noqa: E501
else:
(data) = self.custom_action_response_descriptor_octopus_server_web_api_actions_defect_resolved_responder_spaces_with_http_info(base_space_id, id, **kwargs) # noqa: E501
return data
def custom_action_response_descriptor_octopus_server_web_api_actions_defect_resolved_responder_spaces_with_http_info(self, base_space_id, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_defect_resolved_responder_spaces # noqa: E501
Updates or resolves a defect in a release. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_defect_resolved_responder_spaces_with_http_info(base_space_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str id: ID of the resource (required)
:return: DefectResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['base_space_id', 'id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method custom_action_response_descriptor_octopus_server_web_api_actions_defect_resolved_responder_spaces" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'base_space_id' is set
if ('base_space_id' not in params or
params['base_space_id'] is None):
raise ValueError("Missing the required parameter `base_space_id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_defect_resolved_responder_spaces`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_defect_resolved_responder_spaces`") # noqa: E501
collection_formats = {}
path_params = {}
if 'base_space_id' in params:
path_params['baseSpaceId'] = params['base_space_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/{baseSpaceId}/releases/{id}/defects/resolve', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DefectResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action(self, environment, tenant, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action # noqa: E501
Gets a document that describes what steps will/won't be run during a deployment to a given environment. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action(environment, tenant, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str environment: ID of the environment (required)
:param str tenant: ID of the tenant (required)
:param str id: ID of the resource (required)
:return: DeploymentPreviewResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_with_http_info(environment, tenant, id, **kwargs) # noqa: E501
else:
(data) = self.custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_with_http_info(environment, tenant, id, **kwargs) # noqa: E501
return data
def custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_with_http_info(self, environment, tenant, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action # noqa: E501
Gets a document that describes what steps will/won't be run during a deployment to a given environment. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_with_http_info(environment, tenant, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str environment: ID of the environment (required)
:param str tenant: ID of the tenant (required)
:param str id: ID of the resource (required)
:return: DeploymentPreviewResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['environment', 'tenant', 'id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'environment' is set
if ('environment' not in params or
params['environment'] is None):
raise ValueError("Missing the required parameter `environment` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action`") # noqa: E501
# verify the required parameter 'tenant' is set
if ('tenant' not in params or
params['tenant'] is None):
raise ValueError("Missing the required parameter `tenant` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action`") # noqa: E501
collection_formats = {}
path_params = {}
if 'environment' in params:
path_params['environment'] = params['environment'] # noqa: E501
if 'tenant' in params:
path_params['tenant'] = params['tenant'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/releases/{id}/deployments/preview/{environment}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeploymentPreviewResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
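# Usage sketch (illustrative only): previewing a deployment requires environment,
# tenant and release ids. Note that this variant's route only contains the
# {environment} and {id} placeholders, so the tenant value, although validated as
# required, is not substituted into the URL; the _0 variant below targets the
# route that ends with /{tenant}. The IDs shown are assumed example values.
#
#     preview = api.custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action("Environments-1", "Tenants-1", "Releases-1")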
def custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_0(self, environment, tenant, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_0 # noqa: E501
Gets a document that describes what steps will/won't be run during a deployment to a given environment for the given tenant. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_0(environment, tenant, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str environment: ID of the environment (required)
:param str tenant: ID of the tenant (required)
:param str id: ID of the resource (required)
:return: DeploymentPreviewResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_0_with_http_info(environment, tenant, id, **kwargs) # noqa: E501
else:
(data) = self.custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_0_with_http_info(environment, tenant, id, **kwargs) # noqa: E501
return data
def custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_0_with_http_info(self, environment, tenant, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_0 # noqa: E501
Gets a document that describes what steps will/won't be run during a deployment to a given environment for the given tenant. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_0_with_http_info(environment, tenant, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str environment: ID of the environment (required)
:param str tenant: ID of the tenant (required)
:param str id: ID of the resource (required)
:return: DeploymentPreviewResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['environment', 'tenant', 'id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_0" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'environment' is set
if ('environment' not in params or
params['environment'] is None):
raise ValueError("Missing the required parameter `environment` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_0`") # noqa: E501
# verify the required parameter 'tenant' is set
if ('tenant' not in params or
params['tenant'] is None):
raise ValueError("Missing the required parameter `tenant` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_0`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_0`") # noqa: E501
collection_formats = {}
path_params = {}
if 'environment' in params:
path_params['environment'] = params['environment'] # noqa: E501
if 'tenant' in params:
path_params['tenant'] = params['tenant'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/releases/{id}/deployments/preview/{environment}/{tenant}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeploymentPreviewResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces(self, base_space_id, environment, tenant, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces # noqa: E501
Gets a document that describes what steps will/won't be run during a deployment to a given environment. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces(base_space_id, environment, tenant, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str environment: ID of the environment (required)
:param str tenant: ID of the tenant (required)
:param str id: ID of the resource (required)
:return: DeploymentPreviewResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces_with_http_info(base_space_id, environment, tenant, id, **kwargs) # noqa: E501
else:
(data) = self.custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces_with_http_info(base_space_id, environment, tenant, id, **kwargs) # noqa: E501
return data
def custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces_with_http_info(self, base_space_id, environment, tenant, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces # noqa: E501
Gets a document that describes what steps will/won't be run during a deployment to a given environment. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces_with_http_info(base_space_id, environment, tenant, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str environment: ID of the environment (required)
:param str tenant: ID of the tenant (required)
:param str id: ID of the resource (required)
:return: DeploymentPreviewResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['base_space_id', 'environment', 'tenant', 'id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'base_space_id' is set
if ('base_space_id' not in params or
params['base_space_id'] is None):
raise ValueError("Missing the required parameter `base_space_id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces`") # noqa: E501
# verify the required parameter 'environment' is set
if ('environment' not in params or
params['environment'] is None):
raise ValueError("Missing the required parameter `environment` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces`") # noqa: E501
# verify the required parameter 'tenant' is set
if ('tenant' not in params or
params['tenant'] is None):
raise ValueError("Missing the required parameter `tenant` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces`") # noqa: E501
collection_formats = {}
path_params = {}
if 'base_space_id' in params:
path_params['baseSpaceId'] = params['base_space_id'] # noqa: E501
if 'environment' in params:
path_params['environment'] = params['environment'] # noqa: E501
if 'tenant' in params:
path_params['tenant'] = params['tenant'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/{baseSpaceId}/releases/{id}/deployments/preview/{environment}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeploymentPreviewResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces_0(self, base_space_id, environment, tenant, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces_0 # noqa: E501
Gets a document that describes what steps will/won't be run during a deployment to a given environment for the given tenant. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces_0(base_space_id, environment, tenant, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str environment: ID of the environment (required)
:param str tenant: ID of the tenant (required)
:param str id: ID of the resource (required)
:return: DeploymentPreviewResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces_0_with_http_info(base_space_id, environment, tenant, id, **kwargs) # noqa: E501
else:
(data) = self.custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces_0_with_http_info(base_space_id, environment, tenant, id, **kwargs) # noqa: E501
return data
def custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces_0_with_http_info(self, base_space_id, environment, tenant, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces_0 # noqa: E501
Gets a document that describes what steps will/won't be run during a deployment to a given environment for the given tenant. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces_0_with_http_info(base_space_id, environment, tenant, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str environment: ID of the environment (required)
:param str tenant: ID of the tenant (required)
:param str id: ID of the resource (required)
:return: DeploymentPreviewResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['base_space_id', 'environment', 'tenant', 'id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces_0" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'base_space_id' is set
if ('base_space_id' not in params or
params['base_space_id'] is None):
raise ValueError("Missing the required parameter `base_space_id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces_0`") # noqa: E501
# verify the required parameter 'environment' is set
if ('environment' not in params or
params['environment'] is None):
raise ValueError("Missing the required parameter `environment` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces_0`") # noqa: E501
# verify the required parameter 'tenant' is set
if ('tenant' not in params or
params['tenant'] is None):
raise ValueError("Missing the required parameter `tenant` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces_0`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces_0`") # noqa: E501
collection_formats = {}
path_params = {}
if 'base_space_id' in params:
path_params['baseSpaceId'] = params['base_space_id'] # noqa: E501
if 'environment' in params:
path_params['environment'] = params['environment'] # noqa: E501
if 'tenant' in params:
path_params['tenant'] = params['tenant'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/{baseSpaceId}/releases/{id}/deployments/preview/{environment}/{tenant}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeploymentPreviewResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
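# Illustrative usage sketch (not part of the generated client): a synchronous
# call to the space-scoped deployment preview endpoint above. It assumes `api`
# is an instance of this API class wired to an authenticated ApiClient, and
# the IDs below are placeholders.
#
#     preview = api.custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces_0(
#         'Spaces-1', 'Environments-1', 'Tenants-1', 'Releases-1')
#
# The returned DeploymentPreviewResource describes which steps would and would
# not run for that environment/tenant combination.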
def custom_action_response_descriptor_octopus_server_web_api_actions_deployment_template_action(self, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_deployment_template_action # noqa: E501
Gets all of the information necessary for creating or editing a deployment for this release. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_deployment_template_action(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: ID of the resource (required)
:return: DeploymentTemplateResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.custom_action_response_descriptor_octopus_server_web_api_actions_deployment_template_action_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.custom_action_response_descriptor_octopus_server_web_api_actions_deployment_template_action_with_http_info(id, **kwargs) # noqa: E501
return data
def custom_action_response_descriptor_octopus_server_web_api_actions_deployment_template_action_with_http_info(self, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_deployment_template_action # noqa: E501
Gets all of the information necessary for creating or editing a deployment for this release. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_deployment_template_action_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: ID of the resource (required)
:return: DeploymentTemplateResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method custom_action_response_descriptor_octopus_server_web_api_actions_deployment_template_action" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_deployment_template_action`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/releases/{id}/deployments/template', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeploymentTemplateResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
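# Illustrative usage sketch (not part of the generated client), assuming `api`
# is an authenticated instance of this API class and 'Releases-1' is a
# placeholder ID:
#
#     template = api.custom_action_response_descriptor_octopus_server_web_api_actions_deployment_template_action('Releases-1')
#
# Passing async_req=True instead returns a thread; calling .get() on it yields
# the DeploymentTemplateResource once the request completes.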
def custom_action_response_descriptor_octopus_server_web_api_actions_deployment_template_action_spaces(self, base_space_id, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_deployment_template_action_spaces # noqa: E501
Gets all of the information necessary for creating or editing a deployment for this release. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_deployment_template_action_spaces(base_space_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str id: ID of the resource (required)
:return: DeploymentTemplateResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.custom_action_response_descriptor_octopus_server_web_api_actions_deployment_template_action_spaces_with_http_info(base_space_id, id, **kwargs) # noqa: E501
else:
(data) = self.custom_action_response_descriptor_octopus_server_web_api_actions_deployment_template_action_spaces_with_http_info(base_space_id, id, **kwargs) # noqa: E501
return data
def custom_action_response_descriptor_octopus_server_web_api_actions_deployment_template_action_spaces_with_http_info(self, base_space_id, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_deployment_template_action_spaces # noqa: E501
Gets all of the information necessary for creating or editing a deployment for this release. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_deployment_template_action_spaces_with_http_info(base_space_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str id: ID of the resource (required)
:return: DeploymentTemplateResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['base_space_id', 'id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method custom_action_response_descriptor_octopus_server_web_api_actions_deployment_template_action_spaces" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'base_space_id' is set
if ('base_space_id' not in params or
params['base_space_id'] is None):
raise ValueError("Missing the required parameter `base_space_id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_deployment_template_action_spaces`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_deployment_template_action_spaces`") # noqa: E501
collection_formats = {}
path_params = {}
if 'base_space_id' in params:
path_params['baseSpaceId'] = params['base_space_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/{baseSpaceId}/releases/{id}/deployments/template', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeploymentTemplateResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def custom_action_response_descriptor_octopus_server_web_api_actions_release_by_project_and_version_responder(self, version, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_release_by_project_and_version_responder # noqa: E501
Gets a single release by project ID and version number. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_release_by_project_and_version_responder(version, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str version: Version of the release (required)
:param str id: ID of the resource (required)
:return: ReleaseResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.custom_action_response_descriptor_octopus_server_web_api_actions_release_by_project_and_version_responder_with_http_info(version, id, **kwargs) # noqa: E501
else:
(data) = self.custom_action_response_descriptor_octopus_server_web_api_actions_release_by_project_and_version_responder_with_http_info(version, id, **kwargs) # noqa: E501
return data
def custom_action_response_descriptor_octopus_server_web_api_actions_release_by_project_and_version_responder_with_http_info(self, version, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_release_by_project_and_version_responder # noqa: E501
Gets a single release by project ID and version number. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_release_by_project_and_version_responder_with_http_info(version, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str version: Version of the release (required)
:param str id: ID of the resource (required)
:return: ReleaseResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['version', 'id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method custom_action_response_descriptor_octopus_server_web_api_actions_release_by_project_and_version_responder" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'version' is set
if ('version' not in params or
params['version'] is None):
raise ValueError("Missing the required parameter `version` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_release_by_project_and_version_responder`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_release_by_project_and_version_responder`") # noqa: E501
collection_formats = {}
path_params = {}
if 'version' in params:
path_params['version'] = params['version'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/projects/{id}/releases/{version}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ReleaseResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
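# Illustrative usage sketch (not part of the generated client); the version
# string and project ID below are placeholders. Note the argument order in the
# signature above: the version string first, then the project ID.
#
#     release = api.custom_action_response_descriptor_octopus_server_web_api_actions_release_by_project_and_version_responder(
#         '1.0.1', 'Projects-1')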
def custom_action_response_descriptor_octopus_server_web_api_actions_release_by_project_and_version_responder_spaces(self, base_space_id, version, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_release_by_project_and_version_responder_spaces # noqa: E501
Gets a single release by project ID and version number. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_release_by_project_and_version_responder_spaces(base_space_id, version, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str version: Version of the release (required)
:param str id: ID of the resource (required)
:return: ReleaseResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.custom_action_response_descriptor_octopus_server_web_api_actions_release_by_project_and_version_responder_spaces_with_http_info(base_space_id, version, id, **kwargs) # noqa: E501
else:
(data) = self.custom_action_response_descriptor_octopus_server_web_api_actions_release_by_project_and_version_responder_spaces_with_http_info(base_space_id, version, id, **kwargs) # noqa: E501
return data
def custom_action_response_descriptor_octopus_server_web_api_actions_release_by_project_and_version_responder_spaces_with_http_info(self, base_space_id, version, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_release_by_project_and_version_responder_spaces # noqa: E501
Gets a single release by project ID and version number. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_release_by_project_and_version_responder_spaces_with_http_info(base_space_id, version, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str version: Version of the release (required)
:param str id: ID of the resource (required)
:return: ReleaseResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['base_space_id', 'version', 'id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method custom_action_response_descriptor_octopus_server_web_api_actions_release_by_project_and_version_responder_spaces" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'base_space_id' is set
if ('base_space_id' not in params or
params['base_space_id'] is None):
raise ValueError("Missing the required parameter `base_space_id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_release_by_project_and_version_responder_spaces`") # noqa: E501
# verify the required parameter 'version' is set
if ('version' not in params or
params['version'] is None):
raise ValueError("Missing the required parameter `version` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_release_by_project_and_version_responder_spaces`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_release_by_project_and_version_responder_spaces`") # noqa: E501
collection_formats = {}
path_params = {}
if 'base_space_id' in params:
path_params['baseSpaceId'] = params['base_space_id'] # noqa: E501
if 'version' in params:
path_params['version'] = params['version'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/{baseSpaceId}/projects/{id}/releases/{version}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ReleaseResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def custom_action_response_descriptor_octopus_server_web_api_actions_release_lifecycle_progression_action(self, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_release_lifecycle_progression_action # noqa: E501
Gets the lifecycle progression for this release. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_release_lifecycle_progression_action(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: ID of the resource (required)
:return: LifecycleProgressionResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.custom_action_response_descriptor_octopus_server_web_api_actions_release_lifecycle_progression_action_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.custom_action_response_descriptor_octopus_server_web_api_actions_release_lifecycle_progression_action_with_http_info(id, **kwargs) # noqa: E501
return data
def custom_action_response_descriptor_octopus_server_web_api_actions_release_lifecycle_progression_action_with_http_info(self, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_release_lifecycle_progression_action # noqa: E501
Gets the lifecycle progression for this release. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_release_lifecycle_progression_action_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: ID of the resource (required)
:return: LifecycleProgressionResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method custom_action_response_descriptor_octopus_server_web_api_actions_release_lifecycle_progression_action" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_release_lifecycle_progression_action`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/releases/{id}/progression', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='LifecycleProgressionResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
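# Illustrative usage sketch (not part of the generated client); 'Releases-1'
# is a placeholder ID.
#
#     progression = api.custom_action_response_descriptor_octopus_server_web_api_actions_release_lifecycle_progression_action('Releases-1')
#
# The returned LifecycleProgressionResource reports how far the release has
# progressed through the phases of its lifecycle.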
def custom_action_response_descriptor_octopus_server_web_api_actions_release_lifecycle_progression_action_spaces(self, base_space_id, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_release_lifecycle_progression_action_spaces # noqa: E501
Gets the lifecycle progression for this release. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_release_lifecycle_progression_action_spaces(base_space_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str id: ID of the resource (required)
:return: LifecycleProgressionResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.custom_action_response_descriptor_octopus_server_web_api_actions_release_lifecycle_progression_action_spaces_with_http_info(base_space_id, id, **kwargs) # noqa: E501
else:
(data) = self.custom_action_response_descriptor_octopus_server_web_api_actions_release_lifecycle_progression_action_spaces_with_http_info(base_space_id, id, **kwargs) # noqa: E501
return data
def custom_action_response_descriptor_octopus_server_web_api_actions_release_lifecycle_progression_action_spaces_with_http_info(self, base_space_id, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_release_lifecycle_progression_action_spaces # noqa: E501
Gets the lifecycle progression for this release. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_release_lifecycle_progression_action_spaces_with_http_info(base_space_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str id: ID of the resource (required)
:return: LifecycleProgressionResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['base_space_id', 'id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method custom_action_response_descriptor_octopus_server_web_api_actions_release_lifecycle_progression_action_spaces" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'base_space_id' is set
if ('base_space_id' not in params or
params['base_space_id'] is None):
raise ValueError("Missing the required parameter `base_space_id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_release_lifecycle_progression_action_spaces`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_release_lifecycle_progression_action_spaces`") # noqa: E501
collection_formats = {}
path_params = {}
if 'base_space_id' in params:
path_params['baseSpaceId'] = params['base_space_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/{baseSpaceId}/releases/{id}/progression', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='LifecycleProgressionResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def custom_action_response_descriptor_octopus_server_web_api_actions_release_snapshot_variables_action(self, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_release_snapshot_variables_action # noqa: E501
Refresh the variable snapshots associated with the release. The project's deployment process must not have changed since the release was created. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_release_snapshot_variables_action(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: ID of the resource (required)
:return: ReleaseResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.custom_action_response_descriptor_octopus_server_web_api_actions_release_snapshot_variables_action_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.custom_action_response_descriptor_octopus_server_web_api_actions_release_snapshot_variables_action_with_http_info(id, **kwargs) # noqa: E501
return data
def custom_action_response_descriptor_octopus_server_web_api_actions_release_snapshot_variables_action_with_http_info(self, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_release_snapshot_variables_action # noqa: E501
Refresh the variable snapshots associated with the release. The project's deployment process must not have changed since the release was created. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_release_snapshot_variables_action_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: ID of the resource (required)
:return: ReleaseResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method custom_action_response_descriptor_octopus_server_web_api_actions_release_snapshot_variables_action" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_release_snapshot_variables_action`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/releases/{id}/snapshot-variables', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ReleaseResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
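# Illustrative usage sketch (not part of the generated client); 'Releases-1'
# is a placeholder ID. This endpoint is a POST: it refreshes the release's
# variable snapshots and returns the updated ReleaseResource. Per the
# description above, the project's deployment process must not have changed
# since the release was created.
#
#     refreshed = api.custom_action_response_descriptor_octopus_server_web_api_actions_release_snapshot_variables_action('Releases-1')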
def custom_action_response_descriptor_octopus_server_web_api_actions_release_snapshot_variables_action_spaces(self, base_space_id, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_release_snapshot_variables_action_spaces # noqa: E501
Refresh the variable snapshots associated with the release. The project's deployment process must not have changed since the release was created. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_release_snapshot_variables_action_spaces(base_space_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str id: ID of the resource (required)
:return: ReleaseResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.custom_action_response_descriptor_octopus_server_web_api_actions_release_snapshot_variables_action_spaces_with_http_info(base_space_id, id, **kwargs) # noqa: E501
else:
(data) = self.custom_action_response_descriptor_octopus_server_web_api_actions_release_snapshot_variables_action_spaces_with_http_info(base_space_id, id, **kwargs) # noqa: E501
return data
def custom_action_response_descriptor_octopus_server_web_api_actions_release_snapshot_variables_action_spaces_with_http_info(self, base_space_id, id, **kwargs): # noqa: E501
"""custom_action_response_descriptor_octopus_server_web_api_actions_release_snapshot_variables_action_spaces # noqa: E501
Refresh the variable snapshots associated with the release. The project's deployment process must not have changed since the release was created. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_release_snapshot_variables_action_spaces_with_http_info(base_space_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str id: ID of the resource (required)
:return: ReleaseResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['base_space_id', 'id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method custom_action_response_descriptor_octopus_server_web_api_actions_release_snapshot_variables_action_spaces" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'base_space_id' is set
if ('base_space_id' not in params or
params['base_space_id'] is None):
raise ValueError("Missing the required parameter `base_space_id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_release_snapshot_variables_action_spaces`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_release_snapshot_variables_action_spaces`") # noqa: E501
collection_formats = {}
path_params = {}
if 'base_space_id' in params:
path_params['baseSpaceId'] = params['base_space_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/{baseSpaceId}/releases/{id}/snapshot-variables', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ReleaseResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def custom_query_response_descriptor_octopus_server_web_api_actions_list_defects_responder(self, id, **kwargs): # noqa: E501
"""custom_query_response_descriptor_octopus_server_web_api_actions_list_defects_responder # noqa: E501
Gets all defects for a release. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_query_response_descriptor_octopus_server_web_api_actions_list_defects_responder(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: ID of the release (required)
:return: ResourceCollectionDefectResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.custom_query_response_descriptor_octopus_server_web_api_actions_list_defects_responder_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.custom_query_response_descriptor_octopus_server_web_api_actions_list_defects_responder_with_http_info(id, **kwargs) # noqa: E501
return data
def custom_query_response_descriptor_octopus_server_web_api_actions_list_defects_responder_with_http_info(self, id, **kwargs): # noqa: E501
"""custom_query_response_descriptor_octopus_server_web_api_actions_list_defects_responder # noqa: E501
Gets all defects for a release. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_query_response_descriptor_octopus_server_web_api_actions_list_defects_responder_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: ID of the release (required)
:return: ResourceCollectionDefectResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method custom_query_response_descriptor_octopus_server_web_api_actions_list_defects_responder" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `custom_query_response_descriptor_octopus_server_web_api_actions_list_defects_responder`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/releases/{id}/defects', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResourceCollectionDefectResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
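# Illustrative usage sketch (not part of the generated client); 'Releases-1'
# is a placeholder ID, and the `items` attribute on the returned collection is
# an assumption based on the ResourceCollectionDefectResource model name.
#
#     defects = api.custom_query_response_descriptor_octopus_server_web_api_actions_list_defects_responder('Releases-1')
#     for defect in defects.items:
#         print(defect)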
def custom_query_response_descriptor_octopus_server_web_api_actions_list_defects_responder_spaces(self, base_space_id, id, **kwargs): # noqa: E501
"""custom_query_response_descriptor_octopus_server_web_api_actions_list_defects_responder_spaces # noqa: E501
Gets all defects for a release. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_query_response_descriptor_octopus_server_web_api_actions_list_defects_responder_spaces(base_space_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str id: ID of the release (required)
:return: ResourceCollectionDefectResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.custom_query_response_descriptor_octopus_server_web_api_actions_list_defects_responder_spaces_with_http_info(base_space_id, id, **kwargs) # noqa: E501
else:
(data) = self.custom_query_response_descriptor_octopus_server_web_api_actions_list_defects_responder_spaces_with_http_info(base_space_id, id, **kwargs) # noqa: E501
return data
def custom_query_response_descriptor_octopus_server_web_api_actions_list_defects_responder_spaces_with_http_info(self, base_space_id, id, **kwargs): # noqa: E501
"""custom_query_response_descriptor_octopus_server_web_api_actions_list_defects_responder_spaces # noqa: E501
Gets all defects for a release. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_query_response_descriptor_octopus_server_web_api_actions_list_defects_responder_spaces_with_http_info(base_space_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str id: ID of the release (required)
:return: ResourceCollectionDefectResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['base_space_id', 'id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method custom_query_response_descriptor_octopus_server_web_api_actions_list_defects_responder_spaces" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'base_space_id' is set
if ('base_space_id' not in params or
params['base_space_id'] is None):
raise ValueError("Missing the required parameter `base_space_id` when calling `custom_query_response_descriptor_octopus_server_web_api_actions_list_defects_responder_spaces`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `custom_query_response_descriptor_octopus_server_web_api_actions_list_defects_responder_spaces`") # noqa: E501
collection_formats = {}
path_params = {}
if 'base_space_id' in params:
path_params['baseSpaceId'] = params['base_space_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/{baseSpaceId}/releases/{id}/defects', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResourceCollectionDefectResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_on_background_response_descriptor_projects_release_release_resource(self, id, **kwargs): # noqa: E501
"""Delete a ReleaseResource by ID # noqa: E501
Deletes an existing release, along with all of the deployments, tasks and other associated resources belonging to the release. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_on_background_response_descriptor_projects_release_release_resource(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: ID of the ReleaseResource to delete (required)
:return: TaskResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_on_background_response_descriptor_projects_release_release_resource_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_on_background_response_descriptor_projects_release_release_resource_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_on_background_response_descriptor_projects_release_release_resource_with_http_info(self, id, **kwargs): # noqa: E501
"""Delete a ReleaseResource by ID # noqa: E501
Deletes an existing release, along with all of the deployments, tasks and other associated resources belonging to the release. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_on_background_response_descriptor_projects_release_release_resource_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: ID of the ReleaseResource to delete (required)
:return: TaskResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_on_background_response_descriptor_projects_release_release_resource" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_on_background_response_descriptor_projects_release_release_resource`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/releases/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TaskResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
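# Illustrative usage sketch (not part of the generated client); 'Releases-1'
# is a placeholder ID. Deletion runs in the background, so the call returns a
# TaskResource describing the server-side task rather than the deleted release.
#
#     task = api.delete_on_background_response_descriptor_projects_release_release_resource('Releases-1')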
def delete_on_background_response_descriptor_projects_release_release_resource_spaces(self, base_space_id, id, **kwargs): # noqa: E501
"""Delete a ReleaseResource by ID # noqa: E501
Deletes an existing release, along with all of the deployments, tasks and other associated resources belonging to the release. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_on_background_response_descriptor_projects_release_release_resource_spaces(base_space_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str id: ID of the ReleaseResource to delete (required)
:return: TaskResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_on_background_response_descriptor_projects_release_release_resource_spaces_with_http_info(base_space_id, id, **kwargs) # noqa: E501
else:
(data) = self.delete_on_background_response_descriptor_projects_release_release_resource_spaces_with_http_info(base_space_id, id, **kwargs) # noqa: E501
return data
def delete_on_background_response_descriptor_projects_release_release_resource_spaces_with_http_info(self, base_space_id, id, **kwargs): # noqa: E501
"""Delete a ReleaseResource by ID # noqa: E501
Deletes an existing release, along with all of the deployments, tasks and other associated resources belonging to the release. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_on_background_response_descriptor_projects_release_release_resource_spaces_with_http_info(base_space_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str id: ID of the ReleaseResource to delete (required)
:return: TaskResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['base_space_id', 'id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_on_background_response_descriptor_projects_release_release_resource_spaces" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'base_space_id' is set
if ('base_space_id' not in params or
params['base_space_id'] is None):
raise ValueError("Missing the required parameter `base_space_id` when calling `delete_on_background_response_descriptor_projects_release_release_resource_spaces`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_on_background_response_descriptor_projects_release_release_resource_spaces`") # noqa: E501
collection_formats = {}
path_params = {}
if 'base_space_id' in params:
path_params['baseSpaceId'] = params['base_space_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/{baseSpaceId}/releases/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TaskResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def index_response_descriptor_projects_release_release_resource(self, **kwargs): # noqa: E501
"""Get a list of ReleaseResources # noqa: E501
Lists all of the releases in the supplied Octopus Deploy Space, from all projects. The results will be sorted from most recent to least recent release. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.index_response_descriptor_projects_release_release_resource(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int skip: Number of items to skip
:param int take: Number of items to take
:return: ResourceCollectionReleaseResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.index_response_descriptor_projects_release_release_resource_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.index_response_descriptor_projects_release_release_resource_with_http_info(**kwargs) # noqa: E501
return data
def index_response_descriptor_projects_release_release_resource_with_http_info(self, **kwargs): # noqa: E501
"""Get a list of ReleaseResources # noqa: E501
Lists all of the releases in the supplied Octopus Deploy Space, from all projects. The results will be sorted from most recent to least recent release. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.index_response_descriptor_projects_release_release_resource_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int skip: Number of items to skip
:param int take: Number of items to take
:return: ResourceCollectionReleaseResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['skip', 'take'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method index_response_descriptor_projects_release_release_resource" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'skip' in params:
query_params.append(('skip', params['skip'])) # noqa: E501
if 'take' in params:
query_params.append(('take', params['take'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/releases', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResourceCollectionReleaseResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
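# Illustrative usage sketch (not part of the generated client): paging through
# releases with the optional skip/take query parameters, assuming `api` is an
# authenticated instance of this API class.
#
#     first_page = api.index_response_descriptor_projects_release_release_resource(skip=0, take=30)
#     second_page = api.index_response_descriptor_projects_release_release_resource(skip=30, take=30)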
def index_response_descriptor_projects_release_release_resource_spaces(self, base_space_id, **kwargs): # noqa: E501
"""Get a list of ReleaseResources # noqa: E501
Lists all of the releases in the supplied Octopus Deploy Space, from all projects. The results will be sorted from most recent to least recent release. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.index_response_descriptor_projects_release_release_resource_spaces(base_space_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param int skip: Number of items to skip
:param int take: Number of items to take
:return: ResourceCollectionReleaseResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.index_response_descriptor_projects_release_release_resource_spaces_with_http_info(base_space_id, **kwargs) # noqa: E501
else:
(data) = self.index_response_descriptor_projects_release_release_resource_spaces_with_http_info(base_space_id, **kwargs) # noqa: E501
return data
def index_response_descriptor_projects_release_release_resource_spaces_with_http_info(self, base_space_id, **kwargs): # noqa: E501
"""Get a list of ReleaseResources # noqa: E501
Lists all of the releases in the supplied Octopus Deploy Space, from all projects. The results will be sorted from most recent to least recent release. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.index_response_descriptor_projects_release_release_resource_spaces_with_http_info(base_space_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param int skip: Number of items to skip
:param int take: Number of items to take
:return: ResourceCollectionReleaseResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['base_space_id', 'skip', 'take'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method index_response_descriptor_projects_release_release_resource_spaces" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'base_space_id' is set
if ('base_space_id' not in params or
params['base_space_id'] is None):
raise ValueError("Missing the required parameter `base_space_id` when calling `index_response_descriptor_projects_release_release_resource_spaces`") # noqa: E501
collection_formats = {}
path_params = {}
if 'base_space_id' in params:
path_params['baseSpaceId'] = params['base_space_id'] # noqa: E501
query_params = []
if 'skip' in params:
query_params.append(('skip', params['skip'])) # noqa: E501
if 'take' in params:
query_params.append(('take', params['take'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/{baseSpaceId}/releases', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResourceCollectionReleaseResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def load_response_descriptor_projects_release_release_resource(self, id, **kwargs): # noqa: E501
"""Get a ReleaseResource by ID # noqa: E501
Gets a release by ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.load_response_descriptor_projects_release_release_resource(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: ID of the ReleaseResource to load (required)
:return: ReleaseResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.load_response_descriptor_projects_release_release_resource_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.load_response_descriptor_projects_release_release_resource_with_http_info(id, **kwargs) # noqa: E501
return data
def load_response_descriptor_projects_release_release_resource_with_http_info(self, id, **kwargs): # noqa: E501
"""Get a ReleaseResource by ID # noqa: E501
Gets a release by ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.load_response_descriptor_projects_release_release_resource_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: ID of the ReleaseResource to load (required)
:return: ReleaseResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method load_response_descriptor_projects_release_release_resource" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `load_response_descriptor_projects_release_release_resource`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/releases/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ReleaseResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def load_response_descriptor_projects_release_release_resource_spaces(self, base_space_id, id, **kwargs): # noqa: E501
"""Get a ReleaseResource by ID # noqa: E501
Gets a release by ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.load_response_descriptor_projects_release_release_resource_spaces(base_space_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str id: ID of the ReleaseResource to load (required)
:return: ReleaseResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.load_response_descriptor_projects_release_release_resource_spaces_with_http_info(base_space_id, id, **kwargs) # noqa: E501
else:
(data) = self.load_response_descriptor_projects_release_release_resource_spaces_with_http_info(base_space_id, id, **kwargs) # noqa: E501
return data
def load_response_descriptor_projects_release_release_resource_spaces_with_http_info(self, base_space_id, id, **kwargs): # noqa: E501
"""Get a ReleaseResource by ID # noqa: E501
Gets a release by ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.load_response_descriptor_projects_release_release_resource_spaces_with_http_info(base_space_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str id: ID of the ReleaseResource to load (required)
:return: ReleaseResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['base_space_id', 'id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method load_response_descriptor_projects_release_release_resource_spaces" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'base_space_id' is set
if ('base_space_id' not in params or
params['base_space_id'] is None):
raise ValueError("Missing the required parameter `base_space_id` when calling `load_response_descriptor_projects_release_release_resource_spaces`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `load_response_descriptor_projects_release_release_resource_spaces`") # noqa: E501
collection_formats = {}
path_params = {}
if 'base_space_id' in params:
path_params['baseSpaceId'] = params['base_space_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/{baseSpaceId}/releases/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ReleaseResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def modify_response_descriptor_projects_release_release_resource(self, id, **kwargs): # noqa: E501
"""Modify a ReleaseResource by ID # noqa: E501
Updates an existing release. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.modify_response_descriptor_projects_release_release_resource(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: ID of the ReleaseResource to modify (required)
:param ReleaseResource release_resource: The ReleaseResource resource to update
:return: ReleaseResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.modify_response_descriptor_projects_release_release_resource_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.modify_response_descriptor_projects_release_release_resource_with_http_info(id, **kwargs) # noqa: E501
return data
def modify_response_descriptor_projects_release_release_resource_with_http_info(self, id, **kwargs): # noqa: E501
"""Modify a ReleaseResource by ID # noqa: E501
Updates an existing release. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.modify_response_descriptor_projects_release_release_resource_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: ID of the ReleaseResource to modify (required)
:param ReleaseResource release_resource: The ReleaseResource resource to update
:return: ReleaseResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'release_resource'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method modify_response_descriptor_projects_release_release_resource" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `modify_response_descriptor_projects_release_release_resource`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'release_resource' in params:
body_params = params['release_resource']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/releases/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ReleaseResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def modify_response_descriptor_projects_release_release_resource_spaces(self, base_space_id, id, **kwargs): # noqa: E501
"""Modify a ReleaseResource by ID # noqa: E501
Updates an existing release. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.modify_response_descriptor_projects_release_release_resource_spaces(base_space_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str id: ID of the ReleaseResource to modify (required)
:param ReleaseResource release_resource: The ReleaseResource resource to update
:return: ReleaseResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.modify_response_descriptor_projects_release_release_resource_spaces_with_http_info(base_space_id, id, **kwargs) # noqa: E501
else:
(data) = self.modify_response_descriptor_projects_release_release_resource_spaces_with_http_info(base_space_id, id, **kwargs) # noqa: E501
return data
def modify_response_descriptor_projects_release_release_resource_spaces_with_http_info(self, base_space_id, id, **kwargs): # noqa: E501
"""Modify a ReleaseResource by ID # noqa: E501
Updates an existing release. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.modify_response_descriptor_projects_release_release_resource_spaces_with_http_info(base_space_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str base_space_id: ID of the space (required)
:param str id: ID of the ReleaseResource to modify (required)
:param ReleaseResource release_resource: The ReleaseResource resource to update
:return: ReleaseResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['base_space_id', 'id', 'release_resource'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method modify_response_descriptor_projects_release_release_resource_spaces" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'base_space_id' is set
if ('base_space_id' not in params or
params['base_space_id'] is None):
raise ValueError("Missing the required parameter `base_space_id` when calling `modify_response_descriptor_projects_release_release_resource_spaces`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `modify_response_descriptor_projects_release_release_resource_spaces`") # noqa: E501
collection_formats = {}
path_params = {}
if 'base_space_id' in params:
path_params['baseSpaceId'] = params['base_space_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'release_resource' in params:
body_params = params['release_resource']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/api/{baseSpaceId}/releases/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ReleaseResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
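A minimal usage sketch (not part of the generated client): it assumes `api` is an already-constructed instance of the release API class these methods belong to, as in the docstring examples above, and that the returned ResourceCollectionReleaseResource exposes its page of releases as an `items` attribute.

def list_all_releases(api, page_size=30):
    """Page through GET /api/releases using the skip/take parameters documented above."""
    releases, skip = [], 0
    while True:
        page = api.index_response_descriptor_projects_release_release_resource(
            skip=skip, take=page_size)
        releases.extend(page.items)  # assumption: the collection model exposes `items`
        if len(page.items) < page_size:
            return releases
        skip += page_size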
| 49.789826 | 268 | 0.671755 | 19,472 | 163,460 | 5.299661 | 0.013815 | 0.038529 | 0.025583 | 0.05978 | 0.995019 | 0.995019 | 0.995019 | 0.993914 | 0.993546 | 0.993546 | 0 | 0.012774 | 0.252881 | 163,460 | 3,282 | 269 | 49.804997 | 0.832228 | 0.35375 | 0 | 0.838997 | 1 | 0 | 0.239249 | 0.113744 | 0 | 0 | 0 | 0 | 0 | 1 | 0.036212 | false | 0 | 0.002228 | 0 | 0.092479 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
d0431b1e4800152399ab8d0e8023c7c39f47a44e | 15,616 | py | Python | rqalpha/examples/extend_api/CtpMod/util.py | hadrianl/rqalpha_kairui | eb0e0de1d69f1a4d6f349d4dc011e1c9eccfe2d8 | ["Apache-2.0"] | 2 | 2019-03-06T00:39:45.000Z | 2019-05-26T15:34:03.000Z | rqalpha/examples/extend_api/CtpMod/util.py | hadrianl/rqalpha_kairui | eb0e0de1d69f1a4d6f349d4dc011e1c9eccfe2d8 | ["Apache-2.0"] | null | null | null | rqalpha/examples/extend_api/CtpMod/util.py | hadrianl/rqalpha_kairui | eb0e0de1d69f1a4d6f349d4dc011e1c9eccfe2d8 | ["Apache-2.0"] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2018/8/29 0029 11:27
# @Author : Hadrianl
# @File : util
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from rqalpha.const import COMMISSION_TYPE
FUTURE_INFO = {
'AP': {'contract_multiplier': 10, 'margin_rate': 0.14,
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 20,
'close_commission_ratio': 20, 'close_commission_today_ratio': 20}},
'MA': {'contract_multiplier': 10, 'margin_rate': 0.11,
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 2,
'close_commission_ratio': 2, 'close_commission_today_ratio': 6}},
'SM': {'contract_multiplier': 5, 'margin_rate': 0.11,
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 3,
'close_commission_ratio': 3, 'close_commission_today_ratio': 6}},
'SF': {'contract_multiplier': 5, 'margin_rate': 0.11,
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 3,
'close_commission_ratio': 3, 'close_commission_today_ratio': 9}},
'FG': {'contract_multiplier': 20, 'margin_rate': 0.11,
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 3,
'close_commission_ratio': 3, 'close_commission_today_ratio': 24}},
'CF': {'contract_multiplier': 5, 'margin_rate': 0.1,
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 4.3,
'close_commission_ratio': 4.3, 'close_commission_today_ratio': 0}},
'CY': {'contract_multiplier': 5, 'margin_rate': 0.08,
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 4,
'close_commission_ratio': 4, 'close_commission_today_ratio': 0}},
'OI': {'contract_multiplier': 10, 'margin_rate': 0.1,
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 2,
'close_commission_ratio': 2, 'close_commission_today_ratio': 0}},
'SR': {'contract_multiplier': 10, 'margin_rate': 0.09,
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 3,
'close_commission_ratio': 3, 'close_commission_today_ratio': 0}},
'PTA': {'contract_multiplier': 5, 'margin_rate': 0.1,
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 3,
'close_commission_ratio': 3, 'close_commission_today_ratio': 0}},
'TA': {'contract_multiplier': 5, 'margin_rate': 0.1,
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 3,
'close_commission_ratio': 3, 'close_commission_today_ratio': 0}},
'RM': {'contract_multiplier': 10, 'margin_rate': 0.09,
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 1.5,
'close_commission_ratio': 1.5, 'close_commission_today_ratio': 0}},
'ZC': {'contract_multiplier': 100, 'margin_rate': 0.12,
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 4,
'close_commission_ratio': 4, 'close_commission_today_ratio': 4}},
'JR': {'contract_multiplier': 100, 'margin_rate': 0.08, ###
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 0,
'close_commission_ratio': 0, 'close_commission_today_ratio': 0}},
'LR': {'contract_multiplier': 100, 'margin_rate': 0.07, ###
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 0,
'close_commission_ratio': 0, 'close_commission_today_ratio': 0}},
'PM': {'contract_multiplier': 100, 'margin_rate': 0.09, ###
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 0,
'close_commission_ratio': 0, 'close_commission_today_ratio': 0}},
'RI': {'contract_multiplier': 100, 'margin_rate': 0.07, ###
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 0,
'close_commission_ratio': 0, 'close_commission_today_ratio': 0}},
'RS': {'contract_multiplier': 100, 'margin_rate': 0.23, ###
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 0,
'close_commission_ratio': 0, 'close_commission_today_ratio': 0}},
'WH': {'contract_multiplier': 100, 'margin_rate': 0.22, ###
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 0,
'close_commission_ratio': 0, 'close_commission_today_ratio': 0}},
'JM': {'contract_multiplier': 60, 'margin_rate': 0.13,
'commission': {'commission_type': COMMISSION_TYPE.BY_MONEY, 'open_commission_ratio': 0.00006,
'close_commission_ratio': 0.00018, 'close_commission_today_ratio': 0.00018}},
'J': {'contract_multiplier': 100, 'margin_rate': 0.13,
'commission': {'commission_type': COMMISSION_TYPE.BY_MONEY, 'open_commission_ratio': 0.00006,
'close_commission_ratio': 0.00018, 'close_commission_today_ratio': 0.00018}},
'I': {'contract_multiplier': 100, 'margin_rate': 0.12,
'commission': {'commission_type': COMMISSION_TYPE.BY_MONEY, 'open_commission_ratio': 0.00006,
'close_commission_ratio': 0.00012, 'close_commission_today_ratio': 0.00012}},
'C': {'contract_multiplier': 10, 'margin_rate': 0.08,
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 1.2,
'close_commission_ratio': 2, 'close_commission_today_ratio': 0}},
'V': {'contract_multiplier': 5, 'margin_rate': 0.1,
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 2,
'close_commission_ratio': 2, 'close_commission_today_ratio': 0}},
'B': {'contract_multiplier': 10, 'margin_rate': 0.11,
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 2,
'close_commission_ratio': 2, 'close_commission_today_ratio': 2}},
'M': {'contract_multiplier': 10, 'margin_rate': 0.11,
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 1.5,
'close_commission_ratio': 1.5, 'close_commission_today_ratio': 1.5}},
'P': {'contract_multiplier': 10, 'margin_rate': 0.09,
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 2.5,
'close_commission_ratio': 2.5, 'close_commission_today_ratio': 2.5}},
'PP': {'contract_multiplier': 5, 'margin_rate': 0.01,
'commission': {'commission_type': COMMISSION_TYPE.BY_MONEY, 'open_commission_ratio': 0.00006,
'close_commission_ratio': 0.00006, 'close_commission_today_ratio': 0.00006}},
'Y': {'contract_multiplier': 10, 'margin_rate': 0.09,
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 2.5,
'close_commission_ratio': 2.5, 'close_commission_today_ratio': 2.5}},
'A': {'contract_multiplier': 10, 'margin_rate': 0.1,
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 2,
'close_commission_ratio': 2, 'close_commission_today_ratio': 2}},
'CS': {'contract_multiplier': 10, 'margin_rate': 0.08,
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 1.5,
'close_commission_ratio': 1.5, 'close_commission_today_ratio': 1.5}},
'L': {'contract_multiplier': 5, 'margin_rate': 0.1,
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 2,
'close_commission_ratio': 2, 'close_commission_today_ratio': 2}},
'JD': {'contract_multiplier': 10, 'margin_rate': 0.12,
'commission': {'commission_type': COMMISSION_TYPE.BY_MONEY, 'open_commission_ratio': 0.00015,
'close_commission_ratio': 0.00015, 'close_commission_today_ratio': 0.00015}},
'BB': {'contract_multiplier': 10, 'margin_rate': 0.22, ###
'commission': {'commission_type': COMMISSION_TYPE.BY_MONEY, 'open_commission_ratio': 0.0,
'close_commission_ratio': 0.0, 'close_commission_today_ratio': 0.0}},
'FB': {'contract_multiplier': 10, 'margin_rate': 0.22, ###
'commission': {'commission_type': COMMISSION_TYPE.BY_MONEY, 'open_commission_ratio': 0.0,
'close_commission_ratio': 0.0, 'close_commission_today_ratio': 0.0}},
'SN': {'contract_multiplier': 1, 'margin_rate': 0.1,
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 3,
'close_commission_ratio': 3, 'close_commission_today_ratio': 0}},
'AL': {'contract_multiplier': 5, 'margin_rate': 0.11,
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 3,
'close_commission_ratio': 3, 'close_commission_today_ratio': 0}},
'AU': {'contract_multiplier': 1000, 'margin_rate': 0.09,
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 10,
'close_commission_ratio': 10, 'close_commission_today_ratio': 0}},
'ZN': {'contract_multiplier': 5, 'margin_rate': 0.11,
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 3,
'close_commission_ratio': 3, 'close_commission_today_ratio': 0}},
'CU': {'contract_multiplier': 5, 'margin_rate': 0.11,
'commission': {'commission_type': COMMISSION_TYPE.BY_MONEY, 'open_commission_ratio': 0.00005,
'close_commission_ratio': 0.00005, 'close_commission_today_ratio': 0}},
'SC': {'contract_multiplier': 1000, 'margin_rate': 0.11,
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 20,
'close_commission_ratio': 20, 'close_commission_today_ratio': 0}},
'PB': {'contract_multiplier': 5, 'margin_rate': 0.11,
'commission': {'commission_type': COMMISSION_TYPE.BY_MONEY, 'open_commission_ratio': 0.00004,
'close_commission_ratio': 0.00004, 'close_commission_today_ratio': 0}},
'BU': {'contract_multiplier': 10, 'margin_rate': 0.12,
'commission': {'commission_type': COMMISSION_TYPE.BY_MONEY, 'open_commission_ratio': 0.0001,
'close_commission_ratio': 0.0001, 'close_commission_today_ratio': 0.0001}},
'HC': {'contract_multiplier': 10, 'margin_rate': 0.12,
'commission': {'commission_type': COMMISSION_TYPE.BY_MONEY, 'open_commission_ratio': 0.0001,
'close_commission_ratio': 0.0001, 'close_commission_today_ratio': 0.0001}},
'NI': {'contract_multiplier': 1, 'margin_rate': 0.01,
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 6,
'close_commission_ratio': 6, 'close_commission_today_ratio': 6}},
'RB': {'contract_multiplier': 10, 'margin_rate': 0.13,
'commission': {'commission_type': COMMISSION_TYPE.BY_MONEY, 'open_commission_ratio': 0.0001,
'close_commission_ratio': 0.0001, 'close_commission_today_ratio': 0.0001}},
'RU': {'contract_multiplier': 10, 'margin_rate': 0.14,
'commission': {'commission_type': COMMISSION_TYPE.BY_MONEY, 'open_commission_ratio': 0.000045,
'close_commission_ratio': 0.000045, 'close_commission_today_ratio': 0.000045}},
'AG': {'contract_multiplier': 15, 'margin_rate': 0.1,
'commission': {'commission_type': COMMISSION_TYPE.BY_MONEY, 'open_commission_ratio': 0.00005,
'close_commission_ratio': 0.00005, 'close_commission_today_ratio': 0.00005}},
'FU': {'contract_multiplier': 10, 'margin_rate': 0.14, ###
'commission': {'commission_type': COMMISSION_TYPE.BY_MONEY, 'open_commission_ratio': 0.0,
'close_commission_ratio': 0.0, 'close_commission_today_ratio': 0.0}},
'WR': {'contract_multiplier': 10, 'margin_rate': 0.22, ###
'commission': {'commission_type': COMMISSION_TYPE.BY_MONEY, 'open_commission_ratio': 0.0,
'close_commission_ratio': 0.0, 'close_commission_today_ratio': 0.0}},
'IC': {'contract_multiplier': 200, 'margin_rate': 0.3,
'commission': {'commission_type': COMMISSION_TYPE.BY_MONEY, 'open_commission_ratio': 0.000023,
'close_commission_ratio': 0.000023, 'close_commission_today_ratio': 0.000069}},
'IF': {'contract_multiplier': 300, 'margin_rate': 0.16,
'commission': {'commission_type': COMMISSION_TYPE.BY_MONEY, 'open_commission_ratio': 0.000023,
'close_commission_ratio': 0.000023, 'close_commission_today_ratio': 0.000069}},
'IH': {'contract_multiplier': 300, 'margin_rate': 0.16,
'commission': {'commission_type': COMMISSION_TYPE.BY_MONEY, 'open_commission_ratio': 0.000023,
'close_commission_ratio': 0.000023, 'close_commission_today_ratio': 0.000069}},
'T': {'contract_multiplier': 10000, 'margin_rate': 0.03,
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 3,
'close_commission_ratio': 3, 'close_commission_today_ratio': 0}},
'TF': {'contract_multiplier': 10000, 'margin_rate': 0.022,
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 3,
'close_commission_ratio': 3, 'close_commission_today_ratio': 0}},
'TS': {'contract_multiplier': 10000, 'margin_rate': 0.005,
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 3,
'close_commission_ratio': 3, 'close_commission_today_ratio': 0}},
'EG': {'contract_multiplier': 10, 'margin_rate': 0.05,
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 3,
'close_commission_ratio': 3, 'close_commission_today_ratio': 0}},
'SP': {'contract_multiplier': 10, 'margin_rate': 0.04,
'commission': {'commission_type': COMMISSION_TYPE.BY_VOLUME, 'open_commission_ratio': 3,
'close_commission_ratio': 3, 'close_commission_today_ratio': 0}},
}
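A minimal sketch (not part of util.py) of how the FUTURE_INFO table above might be consumed: the helper name and signature are illustrative, and it assumes the commission ratios are charged per lot for BY_VOLUME contracts and as a fraction of the traded notional for BY_MONEY contracts.

from rqalpha.const import COMMISSION_TYPE

def open_commission(underlying_symbol, price, lots):
    """Estimate the commission for opening `lots` contracts of `underlying_symbol` at `price`."""
    info = FUTURE_INFO[underlying_symbol]
    commission = info['commission']
    ratio = commission['open_commission_ratio']
    if commission['commission_type'] == COMMISSION_TYPE.BY_VOLUME:
        return ratio * lots  # flat fee per lot
    # BY_MONEY: fee is a fraction of the traded notional value
    return ratio * price * info['contract_multiplier'] * lots

# e.g. open_commission('RB', 3800, 2) charges 0.0001 * 3800 * 10 * 2 on rebar futures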
| 78.472362 | 105 | 0.642802 | 1,808 | 15,616 | 5.165929 | 0.105642 | 0.175375 | 0.068308 | 0.211135 | 0.916702 | 0.89818 | 0.830835 | 0.830835 | 0.830835 | 0.830835 | 0 | 0.060278 | 0.212795 | 15,616 | 198 | 106 | 78.868687 | 0.699504 | 0.040535 | 0 | 0.508475 | 0 | 0 | 0.496351 | 0.275691 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.00565 | 0 | 0.00565 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d05204fbc1333e52b09d31dec72734f38fdd132f | 141,864 | py | Python | libs/external_libs/gdata.py-1.0.13/src/gdata/test_data.py | google-code-export/django-hotclub | d783a5bbcc06816289565f3eae6d99461188ca4a | ["MIT"] | 3 | 2015-12-25T14:45:36.000Z | 2016-11-28T09:58:03.000Z | libs/external_libs/gdata.py-1.0.13/src/gdata/test_data.py | indro/t2c | 56482ad4aed150f29353e054db2c97b567243bf8 | ["MIT"] | null | null | null | libs/external_libs/gdata.py-1.0.13/src/gdata/test_data.py | indro/t2c | 56482ad4aed150f29353e054db2c97b567243bf8 | ["MIT"] | null | null | null |
#!/usr/bin/python
#
# Copyright (C) 2006 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
XML_ENTRY_1 = """<?xml version='1.0'?>
<entry xmlns='http://www.w3.org/2005/Atom'
xmlns:g='http://base.google.com/ns/1.0'>
<category scheme="http://base.google.com/categories/itemtypes"
term="products"/>
<id> http://www.google.com/test/id/url </id>
<title type='text'>Testing 2000 series laptop</title>
<content type='xhtml'>
<div xmlns='http://www.w3.org/1999/xhtml'>A Testing Laptop</div>
</content>
<link rel='alternate' type='text/html'
href='http://www.provider-host.com/123456789'/>
<link rel='license'
href='http://creativecommons.org/licenses/by-nc/2.5/rdf'/>
<g:label>Computer</g:label>
<g:label>Laptop</g:label>
<g:label>testing laptop</g:label>
<g:item_type>products</g:item_type>
</entry>"""
TEST_BASE_ENTRY = """<?xml version='1.0'?>
<entry xmlns='http://www.w3.org/2005/Atom'
xmlns:g='http://base.google.com/ns/1.0'>
<category scheme="http://base.google.com/categories/itemtypes"
term="products"/>
<title type='text'>Testing 2000 series laptop</title>
<content type='xhtml'>
<div xmlns='http://www.w3.org/1999/xhtml'>A Testing Laptop</div>
</content>
<app:control xmlns:app='http://purl.org/atom/app#'>
<app:draft>yes</app:draft>
<gm:disapproved xmlns:gm='http://base.google.com/ns-metadata/1.0'/>
</app:control>
<link rel='alternate' type='text/html'
href='http://www.provider-host.com/123456789'/>
<g:label>Computer</g:label>
<g:label>Laptop</g:label>
<g:label>testing laptop</g:label>
<g:item_type>products</g:item_type>
</entry>"""
BIG_FEED = """<?xml version="1.0" encoding="utf-8"?>
<feed xmlns="http://www.w3.org/2005/Atom">
<title type="text">dive into mark</title>
<subtitle type="html">
A &lt;em&gt;lot&lt;/em&gt; of effort
went into making this effortless
</subtitle>
<updated>2005-07-31T12:29:29Z</updated>
<id>tag:example.org,2003:3</id>
<link rel="alternate" type="text/html"
hreflang="en" href="http://example.org/"/>
<link rel="self" type="application/atom+xml"
href="http://example.org/feed.atom"/>
<rights>Copyright (c) 2003, Mark Pilgrim</rights>
<generator uri="http://www.example.com/" version="1.0">
Example Toolkit
</generator>
<entry>
<title>Atom draft-07 snapshot</title>
<link rel="alternate" type="text/html"
href="http://example.org/2005/04/02/atom"/>
<link rel="enclosure" type="audio/mpeg" length="1337"
href="http://example.org/audio/ph34r_my_podcast.mp3"/>
<id>tag:example.org,2003:3.2397</id>
<updated>2005-07-31T12:29:29Z</updated>
<published>2003-12-13T08:29:29-04:00</published>
<author>
<name>Mark Pilgrim</name>
<uri>http://example.org/</uri>
<email>f8dy@example.com</email>
</author>
<contributor>
<name>Sam Ruby</name>
</contributor>
<contributor>
<name>Joe Gregorio</name>
</contributor>
<content type="xhtml" xml:lang="en"
xml:base="http://diveintomark.org/">
<div xmlns="http://www.w3.org/1999/xhtml">
<p><i>[Update: The Atom draft is finished.]</i></p>
</div>
</content>
</entry>
</feed>
"""
SMALL_FEED = """<?xml version="1.0" encoding="utf-8"?>
<feed xmlns="http://www.w3.org/2005/Atom">
<title>Example Feed</title>
<link href="http://example.org/"/>
<updated>2003-12-13T18:30:02Z</updated>
<author>
<name>John Doe</name>
</author>
<id>urn:uuid:60a76c80-d399-11d9-b93C-0003939e0af6</id>
<entry>
<title>Atom-Powered Robots Run Amok</title>
<link href="http://example.org/2003/12/13/atom03"/>
<id>urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a</id>
<updated>2003-12-13T18:30:02Z</updated>
<summary>Some text.</summary>
</entry>
</feed>
"""
GBASE_FEED = """<?xml version='1.0' encoding='UTF-8'?>
<feed xmlns='http://www.w3.org/2005/Atom' xmlns:openSearch='http://a9.com/-/spec/opensearchrss/1.0/' xmlns:g='http://base.google.com/ns/1.0' xmlns:batch='http://schemas.google.com/gdata/batch'>
<id>http://www.google.com/base/feeds/snippets</id>
<updated>2007-02-08T23:18:21.935Z</updated>
<title type='text'>Items matching query: digital camera</title>
<link rel='alternate' type='text/html' href='http://base.google.com'>
</link>
<link rel='http://schemas.google.com/g/2005#feed' type='application/atom+xml' href='http://www.google.com/base/feeds/snippets'>
</link>
<link rel='self' type='application/atom+xml' href='http://www.google.com/base/feeds/snippets?start-index=1&amp;max-results=25&amp;bq=digital+camera'>
</link>
<link rel='next' type='application/atom+xml' href='http://www.google.com/base/feeds/snippets?start-index=26&amp;max-results=25&amp;bq=digital+camera'>
</link>
<generator version='1.0' uri='http://base.google.com'>GoogleBase </generator>
<openSearch:totalResults>2171885</openSearch:totalResults>
<openSearch:startIndex>1</openSearch:startIndex>
<openSearch:itemsPerPage>25</openSearch:itemsPerPage>
<entry>
<id>http://www.google.com/base/feeds/snippets/13246453826751927533</id>
<published>2007-02-08T13:23:27.000Z</published>
<updated>2007-02-08T16:40:57.000Z</updated>
<category scheme='http://base.google.com/categories/itemtypes' term='Products'>
</category>
<title type='text'>Digital Camera Battery Notebook Computer 12v DC Power Cable - 5.5mm x 2.5mm (Center +) Camera Connecting Cables</title>
<content type='html'>Notebook Computer 12v DC Power Cable - 5.5mm x 2.1mm (Center +) This connection cable will allow any Digital Pursuits battery pack to power portable computers that operate with 12v power and have a 2.1mm power connector (center +) Digital ...</content>
<link rel='alternate' type='text/html' href='http://www.bhphotovideo.com/bnh/controller/home?O=productlist&amp;A=details&amp;Q=&amp;sku=305668&amp;is=REG&amp;kw=DIDCB5092&amp;BI=583'>
</link>
<link rel='self' type='application/atom+xml' href='http://www.google.com/base/feeds/snippets/13246453826751927533'>
</link>
<author>
<name>B&amp;H Photo-Video</name>
<email>anon-szot0wdsq0at@base.google.com</email>
</author>
<g:payment_notes type='text'>PayPal &amp; Bill Me Later credit available online only.</g:payment_notes>
<g:condition type='text'>new</g:condition>
<g:location type='location'>420 9th Ave. 10001</g:location>
<g:id type='text'>305668-REG</g:id>
<g:item_type type='text'>Products</g:item_type>
<g:brand type='text'>Digital Camera Battery</g:brand>
<g:expiration_date type='dateTime'>2007-03-10T13:23:27.000Z</g:expiration_date>
<g:customer_id type='int'>1172711</g:customer_id>
<g:price type='floatUnit'>34.95 usd</g:price>
<g:product_type type='text'>Digital Photography>Camera Connecting Cables</g:product_type>
<g:item_language type='text'>EN</g:item_language>
<g:manufacturer_id type='text'>DCB5092</g:manufacturer_id>
<g:target_country type='text'>US</g:target_country>
<g:weight type='float'>1.0</g:weight>
<g:image_link type='url'>http://base.google.com/base_image?q=http%3A%2F%2Fwww.bhphotovideo.com%2Fimages%2Fitems%2F305668.jpg&amp;dhm=ffffffff84c9a95e&amp;size=6</g:image_link>
</entry>
<entry>
<id>http://www.google.com/base/feeds/snippets/10145771037331858608</id>
<published>2007-02-08T13:23:27.000Z</published>
<updated>2007-02-08T16:40:57.000Z</updated>
<category scheme='http://base.google.com/categories/itemtypes' term='Products'>
</category>
<title type='text'>Digital Camera Battery Electronic Device 5v DC Power Cable - 5.5mm x 2.5mm (Center +) Camera Connecting Cables</title>
<content type='html'>Electronic Device 5v DC Power Cable - 5.5mm x 2.5mm (Center +) This connection cable will allow any Digital Pursuits battery pack to power any electronic device that operates with 5v power and has a 2.5mm power connector (center +) Digital ...</content>
<link rel='alternate' type='text/html' href='http://www.bhphotovideo.com/bnh/controller/home?O=productlist&amp;A=details&amp;Q=&amp;sku=305656&amp;is=REG&amp;kw=DIDCB5108&amp;BI=583'>
</link>
<link rel='self' type='application/atom+xml' href='http://www.google.com/base/feeds/snippets/10145771037331858608'>
</link>
<author>
<name>B&amp;H Photo-Video</name>
<email>anon-szot0wdsq0at@base.google.com</email>
</author>
<g:location type='location'>420 9th Ave. 10001</g:location>
<g:condition type='text'>new</g:condition>
<g:weight type='float'>0.18</g:weight>
<g:target_country type='text'>US</g:target_country>
<g:product_type type='text'>Digital Photography>Camera Connecting Cables</g:product_type>
<g:payment_notes type='text'>PayPal &amp; Bill Me Later credit available online only.</g:payment_notes>
<g:id type='text'>305656-REG</g:id>
<g:image_link type='url'>http://base.google.com/base_image?q=http%3A%2F%2Fwww.bhphotovideo.com%2Fimages%2Fitems%2F305656.jpg&amp;dhm=7315bdc8&amp;size=6</g:image_link>
<g:manufacturer_id type='text'>DCB5108</g:manufacturer_id>
<g:upc type='text'>838098005108</g:upc>
<g:price type='floatUnit'>34.95 usd</g:price>
<g:item_language type='text'>EN</g:item_language>
<g:brand type='text'>Digital Camera Battery</g:brand>
<g:customer_id type='int'>1172711</g:customer_id>
<g:item_type type='text'>Products</g:item_type>
<g:expiration_date type='dateTime'>2007-03-10T13:23:27.000Z</g:expiration_date>
</entry>
<entry>
<id>http://www.google.com/base/feeds/snippets/3128608193804768644</id>
<published>2007-02-08T02:21:27.000Z</published>
<updated>2007-02-08T15:40:13.000Z</updated>
<category scheme='http://base.google.com/categories/itemtypes' term='Products'>
</category>
<title type='text'>Digital Camera Battery Power Cable for Kodak 645 Pro-Back ProBack &amp; DCS-300 Series Camera Connecting Cables</title>
<content type='html'>Camera Connection Cable - to Power Kodak 645 Pro-Back DCS-300 Series Digital Cameras This connection cable will allow any Digital Pursuits battery pack to power the following digital cameras: Kodak DCS Pro Back 645 DCS-300 series Digital Photography ...</content>
<link rel='alternate' type='text/html' href='http://www.bhphotovideo.com/bnh/controller/home?O=productlist&amp;A=details&amp;Q=&amp;sku=305685&amp;is=REG&amp;kw=DIDCB6006&amp;BI=583'>
</link>
<link rel='self' type='application/atom+xml' href='http://www.google.com/base/feeds/snippets/3128608193804768644'>
</link>
<author>
<name>B&amp;H Photo-Video</name>
<email>anon-szot0wdsq0at@base.google.com</email>
</author>
<g:weight type='float'>0.3</g:weight>
<g:manufacturer_id type='text'>DCB6006</g:manufacturer_id>
<g:image_link type='url'>http://base.google.com/base_image?q=http%3A%2F%2Fwww.bhphotovideo.com%2Fimages%2Fitems%2F305685.jpg&amp;dhm=72f0ca0a&amp;size=6</g:image_link>
<g:location type='location'>420 9th Ave. 10001</g:location>
<g:payment_notes type='text'>PayPal &amp; Bill Me Later credit available online only.</g:payment_notes>
<g:item_type type='text'>Products</g:item_type>
<g:target_country type='text'>US</g:target_country>
<g:accessory_for type='text'>digital kodak camera</g:accessory_for>
<g:brand type='text'>Digital Camera Battery</g:brand>
<g:expiration_date type='dateTime'>2007-03-10T02:21:27.000Z</g:expiration_date>
<g:item_language type='text'>EN</g:item_language>
<g:condition type='text'>new</g:condition>
<g:price type='floatUnit'>34.95 usd</g:price>
<g:customer_id type='int'>1172711</g:customer_id>
<g:product_type type='text'>Digital Photography>Camera Connecting Cables</g:product_type>
<g:id type='text'>305685-REG</g:id>
</entry>
</feed>"""
EXTENSION_TREE = """<?xml version="1.0" encoding="utf-8"?>
<feed xmlns="http://www.w3.org/2005/Atom">
<g:author xmlns:g="http://www.google.com">
<g:name>John Doe
<g:foo yes="no" up="down">Bar</g:foo>
</g:name>
</g:author>
</feed>
"""
TEST_AUTHOR = """<?xml version="1.0" encoding="utf-8"?>
<author xmlns="http://www.w3.org/2005/Atom">
<name xmlns="http://www.w3.org/2005/Atom">John Doe</name>
<email xmlns="http://www.w3.org/2005/Atom">johndoes@someemailadress.com</email>
<uri xmlns="http://www.w3.org/2005/Atom">http://www.google.com</uri>
</author>
"""
TEST_LINK = """<?xml version="1.0" encoding="utf-8"?>
<link xmlns="http://www.w3.org/2005/Atom" href="http://www.google.com"
rel="test rel" foo1="bar" foo2="rab"/>
"""
TEST_GBASE_ATTRIBUTE = """<?xml version="1.0" encoding="utf-8"?>
<g:brand type='text' xmlns:g="http://base.google.com/ns/1.0">Digital Camera Battery</g:brand>
"""
CALENDAR_FEED = """<?xml version='1.0' encoding='utf-8'?>
<feed xmlns='http://www.w3.org/2005/Atom'
xmlns:openSearch='http://a9.com/-/spec/opensearchrss/1.0/'
xmlns:gd='http://schemas.google.com/g/2005'
xmlns:gCal='http://schemas.google.com/gCal/2005'>
<id>http://www.google.com/calendar/feeds/default</id>
<updated>2007-03-20T22:48:57.833Z</updated>
<title type='text'>GData Ops Demo's Calendar List</title>
<link rel='http://schemas.google.com/g/2005#feed'
type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default'></link>
<link rel='http://schemas.google.com/g/2005#post'
type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default'></link>
<link rel='self' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default'></link>
<author>
<name>GData Ops Demo</name>
<email>gdata.ops.demo@gmail.com</email>
</author>
<generator version='1.0' uri='http://www.google.com/calendar'>
Google Calendar</generator>
<openSearch:startIndex>1</openSearch:startIndex>
<entry>
<id>
http://www.google.com/calendar/feeds/default/gdata.ops.demo%40gmail.com</id>
<published>2007-03-20T22:48:57.837Z</published>
<updated>2007-03-20T22:48:52.000Z</updated>
<title type='text'>GData Ops Demo</title>
<link rel='alternate' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/gdata.ops.demo%40gmail.com/private/full'>
</link>
<link rel='self' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/gdata.ops.demo%40gmail.com'>
</link>
<author>
<name>GData Ops Demo</name>
<email>gdata.ops.demo@gmail.com</email>
</author>
<gCal:color value='#2952A3'></gCal:color>
<gCal:accesslevel value='owner'></gCal:accesslevel>
<gCal:hidden value='false'></gCal:hidden>
<gCal:timezone value='America/Los_Angeles'></gCal:timezone>
</entry>
<entry>
<id>
http://www.google.com/calendar/feeds/default/jnh21ovnjgfph21h32gvms2758%40group.calendar.google.com</id>
<published>2007-03-20T22:48:57.837Z</published>
<updated>2007-03-20T22:48:53.000Z</updated>
<title type='text'>GData Ops Demo Secondary Calendar</title>
<summary type='text'></summary>
<link rel='alternate' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/jnh21ovnjgfph21h32gvms2758%40group.calendar.google.com/private/full'>
</link>
<link rel='self' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/jnh21ovnjgfph21h32gvms2758%40group.calendar.google.com'>
</link>
<author>
<name>GData Ops Demo Secondary Calendar</name>
</author>
<gCal:color value='#528800'></gCal:color>
<gCal:accesslevel value='owner'></gCal:accesslevel>
<gCal:hidden value='false'></gCal:hidden>
<gCal:timezone value='America/Los_Angeles'></gCal:timezone>
<gd:where valueString=''></gd:where>
</entry>
</feed>
"""
CALENDAR_FULL_EVENT_FEED = """<?xml version='1.0' encoding='utf-8'?>
<feed xmlns='http://www.w3.org/2005/Atom'
xmlns:openSearch='http://a9.com/-/spec/opensearchrss/1.0/'
xmlns:gd='http://schemas.google.com/g/2005'
xmlns:gCal='http://schemas.google.com/gCal/2005'>
<id>
http://www.google.com/calendar/feeds/default/private/full</id>
<updated>2007-03-20T21:29:57.000Z</updated>
<category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/g/2005#event'></category>
<title type='text'>GData Ops Demo</title>
<subtitle type='text'>GData Ops Demo</subtitle>
<link rel='http://schemas.google.com/g/2005#feed'
type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full'>
</link>
<link rel='http://schemas.google.com/g/2005#post'
type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full'>
</link>
<link rel='self' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full?updated-min=2001-01-01&amp;max-results=25'>
</link>
<author>
<name>GData Ops Demo</name>
<email>gdata.ops.demo@gmail.com</email>
</author>
<generator version='1.0' uri='http://www.google.com/calendar'>
Google Calendar</generator>
<openSearch:totalResults>10</openSearch:totalResults>
<openSearch:startIndex>1</openSearch:startIndex>
<openSearch:itemsPerPage>25</openSearch:itemsPerPage>
<gCal:timezone value='America/Los_Angeles'></gCal:timezone>
<entry>
<id>
http://www.google.com/calendar/feeds/default/private/full/o99flmgmkfkfrr8u745ghr3100</id>
<published>2007-03-20T21:29:52.000Z</published>
<updated>2007-03-20T21:29:57.000Z</updated>
<category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/g/2005#event'></category>
<title type='text'>test deleted</title>
<content type='text'></content>
<link rel='alternate' type='text/html'
href='http://www.google.com/calendar/event?eid=bzk5ZmxtZ21rZmtmcnI4dTc0NWdocjMxMDAgZ2RhdGEub3BzLmRlbW9AbQ'
title='alternate'></link>
<link rel='self' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full/o99flmgmkfkfrr8u745ghr3100'>
</link>
<link rel='edit' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full/o99flmgmkfkfrr8u745ghr3100/63310109397'>
</link>
<author>
<name>GData Ops Demo</name>
<email>gdata.ops.demo@gmail.com</email>
</author>
<gCal:sendEventNotifications value='false'>
</gCal:sendEventNotifications>
<gd:eventStatus value='http://schemas.google.com/g/2005#event.canceled'>
</gd:eventStatus>
<gd:comments>
<gd:feedLink href='http://www.google.com/calendar/feeds/default/private/full/o99flmgmkfkfrr8u745ghr3100/comments'>
</gd:feedLink>
</gd:comments>
<gd:visibility value='http://schemas.google.com/g/2005#event.default'>
</gd:visibility>
<gd:transparency value='http://schemas.google.com/g/2005#event.opaque'>
</gd:transparency>
<gd:when startTime='2007-03-23T12:00:00.000-07:00'
endTime='2007-03-23T13:00:00.000-07:00'>
<gd:reminder minutes='10'></gd:reminder>
</gd:when>
<gd:where></gd:where>
</entry>
<entry>
<id>
http://www.google.com/calendar/feeds/default/private/full/2qt3ao5hbaq7m9igr5ak9esjo0</id>
<published>2007-03-20T21:26:04.000Z</published>
<updated>2007-03-20T21:28:46.000Z</updated>
<category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/g/2005#event'></category>
<title type='text'>Afternoon at Dolores Park with Kim</title>
<content type='text'></content>
<link rel='alternate' type='text/html'
href='http://www.google.com/calendar/event?eid=MnF0M2FvNWhiYXE3bTlpZ3I1YWs5ZXNqbzAgZ2RhdGEub3BzLmRlbW9AbQ'
title='alternate'></link>
<link rel='self' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full/2qt3ao5hbaq7m9igr5ak9esjo0'>
</link>
<link rel='edit' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full/2qt3ao5hbaq7m9igr5ak9esjo0/63310109326'>
</link>
<author>
<name>GData Ops Demo</name>
<email>gdata.ops.demo@gmail.com</email>
</author>
<gCal:sendEventNotifications value='false'>
</gCal:sendEventNotifications>
<gd:eventStatus value='http://schemas.google.com/g/2005#event.confirmed'>
</gd:eventStatus>
<gd:comments>
<gd:feedLink href='http://www.google.com/calendar/feeds/default/private/full/2qt3ao5hbaq7m9igr5ak9esjo0/comments'>
</gd:feedLink>
</gd:comments>
<gd:visibility value='http://schemas.google.com/g/2005#event.private'>
</gd:visibility>
<gd:transparency value='http://schemas.google.com/g/2005#event.opaque'>
</gd:transparency>
<gd:who rel='http://schemas.google.com/g/2005#event.organizer'
valueString='GData Ops Demo' email='gdata.ops.demo@gmail.com'>
<gd:attendeeStatus value='http://schemas.google.com/g/2005#event.accepted'>
</gd:attendeeStatus>
</gd:who>
<gd:who rel='http://schemas.google.com/g/2005#event.attendee'
valueString='Ryan Boyd (API)' email='api.rboyd@gmail.com'>
<gd:attendeeStatus value='http://schemas.google.com/g/2005#event.invited'>
</gd:attendeeStatus>
</gd:who>
<gd:when startTime='2007-03-24T12:00:00.000-07:00'
endTime='2007-03-24T15:00:00.000-07:00'>
<gd:reminder minutes='20'></gd:reminder>
</gd:when>
<gd:where valueString='Dolores Park with Kim'></gd:where>
</entry>
<entry>
<id>
http://www.google.com/calendar/feeds/default/private/full/uvsqhg7klnae40v50vihr1pvos</id>
<published>2007-03-20T21:28:37.000Z</published>
<updated>2007-03-20T21:28:37.000Z</updated>
<category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/g/2005#event'></category>
<title type='text'>Team meeting</title>
<content type='text'></content>
<link rel='alternate' type='text/html'
href='http://www.google.com/calendar/event?eid=dXZzcWhnN2tsbmFlNDB2NTB2aWhyMXB2b3NfMjAwNzAzMjNUMTYwMDAwWiBnZGF0YS5vcHMuZGVtb0Bt'
title='alternate'></link>
<link rel='self' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full/uvsqhg7klnae40v50vihr1pvos'>
</link>
<link rel='edit' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full/uvsqhg7klnae40v50vihr1pvos/63310109317'>
</link>
<author>
<name>GData Ops Demo</name>
<email>gdata.ops.demo@gmail.com</email>
</author>
<gd:recurrence>DTSTART;TZID=America/Los_Angeles:20070323T090000
DTEND;TZID=America/Los_Angeles:20070323T100000
RRULE:FREQ=WEEKLY;BYDAY=FR;UNTIL=20070817T160000Z;WKST=SU
BEGIN:VTIMEZONE TZID:America/Los_Angeles
X-LIC-LOCATION:America/Los_Angeles BEGIN:STANDARD
TZOFFSETFROM:-0700 TZOFFSETTO:-0800 TZNAME:PST
DTSTART:19701025T020000 RRULE:FREQ=YEARLY;BYMONTH=10;BYDAY=-1SU
END:STANDARD BEGIN:DAYLIGHT TZOFFSETFROM:-0800 TZOFFSETTO:-0700
TZNAME:PDT DTSTART:19700405T020000
RRULE:FREQ=YEARLY;BYMONTH=4;BYDAY=1SU END:DAYLIGHT
END:VTIMEZONE</gd:recurrence>
<gCal:sendEventNotifications value='true'>
</gCal:sendEventNotifications>
<gd:eventStatus value='http://schemas.google.com/g/2005#event.confirmed'>
</gd:eventStatus>
<gd:visibility value='http://schemas.google.com/g/2005#event.public'>
</gd:visibility>
<gd:transparency value='http://schemas.google.com/g/2005#event.opaque'>
</gd:transparency>
<gd:reminder minutes='10'></gd:reminder>
<gd:where valueString=''></gd:where>
</entry>
<entry>
<id>
http://www.google.com/calendar/feeds/default/private/full/st4vk9kiffs6rasrl32e4a7alo</id>
<published>2007-03-20T21:25:46.000Z</published>
<updated>2007-03-20T21:25:46.000Z</updated>
<category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/g/2005#event'></category>
<title type='text'>Movie with Kim and danah</title>
<content type='text'></content>
<link rel='alternate' type='text/html'
href='http://www.google.com/calendar/event?eid=c3Q0dms5a2lmZnM2cmFzcmwzMmU0YTdhbG8gZ2RhdGEub3BzLmRlbW9AbQ'
title='alternate'></link>
<link rel='self' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full/st4vk9kiffs6rasrl32e4a7alo'>
</link>
<link rel='edit' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full/st4vk9kiffs6rasrl32e4a7alo/63310109146'>
</link>
<author>
<name>GData Ops Demo</name>
<email>gdata.ops.demo@gmail.com</email>
</author>
<gCal:sendEventNotifications value='false'>
</gCal:sendEventNotifications>
<gd:eventStatus value='http://schemas.google.com/g/2005#event.confirmed'>
</gd:eventStatus>
<gd:comments>
<gd:feedLink href='http://www.google.com/calendar/feeds/default/private/full/st4vk9kiffs6rasrl32e4a7alo/comments'>
</gd:feedLink>
</gd:comments>
<gd:visibility value='http://schemas.google.com/g/2005#event.default'>
</gd:visibility>
<gd:transparency value='http://schemas.google.com/g/2005#event.opaque'>
</gd:transparency>
<gd:when startTime='2007-03-24T20:00:00.000-07:00'
endTime='2007-03-24T21:00:00.000-07:00'>
<gd:reminder minutes='10'></gd:reminder>
</gd:when>
<gd:where></gd:where>
</entry>
<entry>
<id>
http://www.google.com/calendar/feeds/default/private/full/ofl1e45ubtsoh6gtu127cls2oo</id>
<published>2007-03-20T21:24:43.000Z</published>
<updated>2007-03-20T21:25:08.000Z</updated>
<category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/g/2005#event'></category>
<title type='text'>Dinner with Kim and Sarah</title>
<content type='text'></content>
<link rel='alternate' type='text/html'
href='http://www.google.com/calendar/event?eid=b2ZsMWU0NXVidHNvaDZndHUxMjdjbHMyb28gZ2RhdGEub3BzLmRlbW9AbQ'
title='alternate'></link>
<link rel='self' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full/ofl1e45ubtsoh6gtu127cls2oo'>
</link>
<link rel='edit' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full/ofl1e45ubtsoh6gtu127cls2oo/63310109108'>
</link>
<author>
<name>GData Ops Demo</name>
<email>gdata.ops.demo@gmail.com</email>
</author>
<gCal:sendEventNotifications value='false'>
</gCal:sendEventNotifications>
<gd:eventStatus value='http://schemas.google.com/g/2005#event.confirmed'>
</gd:eventStatus>
<gd:comments>
<gd:feedLink href='http://www.google.com/calendar/feeds/default/private/full/ofl1e45ubtsoh6gtu127cls2oo/comments'>
</gd:feedLink>
</gd:comments>
<gd:visibility value='http://schemas.google.com/g/2005#event.default'>
</gd:visibility>
<gd:transparency value='http://schemas.google.com/g/2005#event.opaque'>
</gd:transparency>
<gd:when startTime='2007-03-20T19:00:00.000-07:00'
endTime='2007-03-20T21:30:00.000-07:00'>
<gd:reminder minutes='10'></gd:reminder>
</gd:when>
<gd:where></gd:where>
</entry>
<entry>
<id>
http://www.google.com/calendar/feeds/default/private/full/b69s2avfi2joigsclecvjlc91g</id>
<published>2007-03-20T21:24:19.000Z</published>
<updated>2007-03-20T21:25:05.000Z</updated>
<category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/g/2005#event'></category>
<title type='text'>Dinner with Jane and John</title>
<content type='text'></content>
<link rel='alternate' type='text/html'
href='http://www.google.com/calendar/event?eid=YjY5czJhdmZpMmpvaWdzY2xlY3ZqbGM5MWcgZ2RhdGEub3BzLmRlbW9AbQ'
title='alternate'></link>
<link rel='self' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full/b69s2avfi2joigsclecvjlc91g'>
</link>
<link rel='edit' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full/b69s2avfi2joigsclecvjlc91g/63310109105'>
</link>
<author>
<name>GData Ops Demo</name>
<email>gdata.ops.demo@gmail.com</email>
</author>
<gCal:sendEventNotifications value='false'>
</gCal:sendEventNotifications>
<gd:eventStatus value='http://schemas.google.com/g/2005#event.confirmed'>
</gd:eventStatus>
<gd:comments>
<gd:feedLink href='http://www.google.com/calendar/feeds/default/private/full/b69s2avfi2joigsclecvjlc91g/comments'>
</gd:feedLink>
</gd:comments>
<gd:visibility value='http://schemas.google.com/g/2005#event.default'>
</gd:visibility>
<gd:transparency value='http://schemas.google.com/g/2005#event.opaque'>
</gd:transparency>
<gd:when startTime='2007-03-22T17:00:00.000-07:00'
endTime='2007-03-22T19:30:00.000-07:00'>
<gd:reminder minutes='10'></gd:reminder>
</gd:when>
<gd:where></gd:where>
</entry>
<entry>
<id>
http://www.google.com/calendar/feeds/default/private/full/u9p66kkiotn8bqh9k7j4rcnjjc</id>
<published>2007-03-20T21:24:33.000Z</published>
<updated>2007-03-20T21:24:33.000Z</updated>
<category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/g/2005#event'></category>
<title type='text'>Tennis with Elizabeth</title>
<content type='text'></content>
<link rel='alternate' type='text/html'
href='http://www.google.com/calendar/event?eid=dTlwNjZra2lvdG44YnFoOWs3ajRyY25qamMgZ2RhdGEub3BzLmRlbW9AbQ'
title='alternate'></link>
<link rel='self' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full/u9p66kkiotn8bqh9k7j4rcnjjc'>
</link>
<link rel='edit' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full/u9p66kkiotn8bqh9k7j4rcnjjc/63310109073'>
</link>
<author>
<name>GData Ops Demo</name>
<email>gdata.ops.demo@gmail.com</email>
</author>
<gCal:sendEventNotifications value='false'>
</gCal:sendEventNotifications>
<gd:eventStatus value='http://schemas.google.com/g/2005#event.confirmed'>
</gd:eventStatus>
<gd:comments>
<gd:feedLink href='http://www.google.com/calendar/feeds/default/private/full/u9p66kkiotn8bqh9k7j4rcnjjc/comments'>
</gd:feedLink>
</gd:comments>
<gd:visibility value='http://schemas.google.com/g/2005#event.default'>
</gd:visibility>
<gd:transparency value='http://schemas.google.com/g/2005#event.opaque'>
</gd:transparency>
<gd:when startTime='2007-03-24T10:00:00.000-07:00'
endTime='2007-03-24T11:00:00.000-07:00'>
<gd:reminder minutes='10'></gd:reminder>
</gd:when>
<gd:where></gd:where>
</entry>
<entry>
<id>
http://www.google.com/calendar/feeds/default/private/full/76oj2kceidob3s708tvfnuaq3c</id>
<published>2007-03-20T21:24:00.000Z</published>
<updated>2007-03-20T21:24:00.000Z</updated>
<category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/g/2005#event'></category>
<title type='text'>Lunch with Jenn</title>
<content type='text'></content>
<link rel='alternate' type='text/html'
href='http://www.google.com/calendar/event?eid=NzZvajJrY2VpZG9iM3M3MDh0dmZudWFxM2MgZ2RhdGEub3BzLmRlbW9AbQ'
title='alternate'></link>
<link rel='self' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full/76oj2kceidob3s708tvfnuaq3c'>
</link>
<link rel='edit' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full/76oj2kceidob3s708tvfnuaq3c/63310109040'>
</link>
<author>
<name>GData Ops Demo</name>
<email>gdata.ops.demo@gmail.com</email>
</author>
<gCal:sendEventNotifications value='false'>
</gCal:sendEventNotifications>
<gd:eventStatus value='http://schemas.google.com/g/2005#event.confirmed'>
</gd:eventStatus>
<gd:comments>
<gd:feedLink href='http://www.google.com/calendar/feeds/default/private/full/76oj2kceidob3s708tvfnuaq3c/comments'>
</gd:feedLink>
</gd:comments>
<gd:visibility value='http://schemas.google.com/g/2005#event.default'>
</gd:visibility>
<gd:transparency value='http://schemas.google.com/g/2005#event.opaque'>
</gd:transparency>
<gd:when startTime='2007-03-20T11:30:00.000-07:00'
endTime='2007-03-20T12:30:00.000-07:00'>
<gd:reminder minutes='10'></gd:reminder>
</gd:when>
<gd:where></gd:where>
</entry>
<entry>
<id>
http://www.google.com/calendar/feeds/default/private/full/5np9ec8m7uoauk1vedh5mhodco</id>
<published>2007-03-20T07:50:02.000Z</published>
<updated>2007-03-20T20:39:26.000Z</updated>
<category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/g/2005#event'></category>
<title type='text'>test entry</title>
<content type='text'>test desc</content>
<link rel='alternate' type='text/html'
href='http://www.google.com/calendar/event?eid=NW5wOWVjOG03dW9hdWsxdmVkaDVtaG9kY28gZ2RhdGEub3BzLmRlbW9AbQ'
title='alternate'></link>
<link rel='self' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full/5np9ec8m7uoauk1vedh5mhodco'>
</link>
<link rel='edit' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full/5np9ec8m7uoauk1vedh5mhodco/63310106366'>
</link>
<author>
<name>GData Ops Demo</name>
<email>gdata.ops.demo@gmail.com</email>
</author>
<gCal:sendEventNotifications value='false'>
</gCal:sendEventNotifications>
<gd:eventStatus value='http://schemas.google.com/g/2005#event.confirmed'>
</gd:eventStatus>
<gd:comments>
<gd:feedLink href='http://www.google.com/calendar/feeds/default/private/full/5np9ec8m7uoauk1vedh5mhodco/comments'>
</gd:feedLink>
</gd:comments>
<gd:visibility value='http://schemas.google.com/g/2005#event.private'>
</gd:visibility>
<gd:transparency value='http://schemas.google.com/g/2005#event.opaque'>
</gd:transparency>
<gd:who rel='http://schemas.google.com/g/2005#event.attendee'
valueString='Vivian Li' email='vli@google.com'>
<gd:attendeeStatus value='http://schemas.google.com/g/2005#event.declined'>
</gd:attendeeStatus>
</gd:who>
<gd:who rel='http://schemas.google.com/g/2005#event.organizer'
valueString='GData Ops Demo' email='gdata.ops.demo@gmail.com'>
<gd:attendeeStatus value='http://schemas.google.com/g/2005#event.accepted'>
</gd:attendeeStatus>
</gd:who>
<gd:when startTime='2007-03-21T08:00:00.000-07:00'
endTime='2007-03-21T09:00:00.000-07:00'>
<gd:reminder minutes='10'></gd:reminder>
</gd:when>
<gd:where valueString='anywhere'></gd:where>
</entry>
<entry>
<id>
http://www.google.com/calendar/feeds/default/private/full/fu6sl0rqakf3o0a13oo1i1a1mg</id>
<published>2007-02-14T23:23:37.000Z</published>
<updated>2007-02-14T23:25:30.000Z</updated>
<category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/g/2005#event'></category>
<title type='text'>test</title>
<content type='text'></content>
<link rel='alternate' type='text/html'
href='http://www.google.com/calendar/event?eid=ZnU2c2wwcnFha2YzbzBhMTNvbzFpMWExbWcgZ2RhdGEub3BzLmRlbW9AbQ'
title='alternate'></link>
<link rel='self' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full/fu6sl0rqakf3o0a13oo1i1a1mg'>
</link>
<link rel='edit' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full/fu6sl0rqakf3o0a13oo1i1a1mg/63307178730'>
</link>
<link rel="http://schemas.google.com/gCal/2005/webContent" title="World Cup" href="http://www.google.com/calendar/images/google-holiday.gif" type="image/gif">
<gCal:webContent width="276" height="120" url="http://www.google.com/logos/worldcup06.gif" />
</link>
<author>
<name>GData Ops Demo</name>
<email>gdata.ops.demo@gmail.com</email>
</author>
<gCal:sendEventNotifications value='false'>
</gCal:sendEventNotifications>
<gd:eventStatus value='http://schemas.google.com/g/2005#event.confirmed'>
</gd:eventStatus>
<gd:comments>
<gd:feedLink href='http://www.google.com/calendar/feeds/default/private/full/fu6sl0rqakf3o0a13oo1i1a1mg/comments'>
</gd:feedLink>
</gd:comments>
<gd:visibility value='http://schemas.google.com/g/2005#event.default'>
</gd:visibility>
<gd:transparency value='http://schemas.google.com/g/2005#event.opaque'>
</gd:transparency>
<gd:when startTime='2007-02-15T08:30:00.000-08:00'
endTime='2007-02-15T09:30:00.000-08:00'>
<gd:reminder minutes='10'></gd:reminder>
</gd:when>
<gd:where></gd:where>
</entry>
<entry>
<id>
http://www.google.com/calendar/feeds/default/private/full/h7a0haa4da8sil3rr19ia6luvc</id>
<published>2007-07-16T22:13:28.000Z</published>
<updated>2007-07-16T22:13:29.000Z</updated>
<category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/g/2005#event' />
<title type='text'></title>
<content type='text' />
<link rel='alternate' type='text/html'
href='http://www.google.com/calendar/event?eid=aDdhMGhhYTRkYThzaWwzcnIxOWlhNmx1dmMgZ2RhdGEub3BzLmRlbW9AbQ'
title='alternate' />
<link rel='http://schemas.google.com/gCal/2005/webContent'
type='application/x-google-gadgets+xml'
href='http://gdata.ops.demo.googlepages.com/birthdayicon.gif'
title='Date and Time Gadget'>
<gCal:webContent width='300' height='136'
url='http://google.com/ig/modules/datetime.xml'>
<gCal:webContentGadgetPref name='color' value='green' />
</gCal:webContent>
</link>
<link rel='self' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full/h7a0haa4da8sil3rr19ia6luvc' />
<link rel='edit' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full/h7a0haa4da8sil3rr19ia6luvc/63320307209' />
<author>
<name>GData Ops Demo</name>
<email>gdata.ops.demo@gmail.com</email>
</author>
<gd:comments>
<gd:feedLink href='http://www.google.com/calendar/feeds/default/private/full/h7a0haa4da8sil3rr19ia6luvc/comments' />
</gd:comments>
<gCal:sendEventNotifications value='false'>
</gCal:sendEventNotifications>
<gd:eventStatus value='http://schemas.google.com/g/2005#event.confirmed' />
<gd:visibility value='http://schemas.google.com/g/2005#event.default' />
<gd:transparency value='http://schemas.google.com/g/2005#event.opaque' />
<gd:when startTime='2007-03-14' endTime='2007-03-15' />
<gd:where />
</entry>
</feed>
"""
CALENDAR_BATCH_REQUEST = """<?xml version='1.0' encoding='utf-8'?>
<feed xmlns='http://www.w3.org/2005/Atom'
xmlns:batch='http://schemas.google.com/gdata/batch'
xmlns:gCal='http://schemas.google.com/gCal/2005'>
<category scheme='http://schemas.google.com/g/2005#kind' term='http://schemas.google.com/g/2005#event' />
<entry>
<batch:id>1</batch:id>
<batch:operation type='insert' />
<category scheme='http://schemas.google.com/g/2005#kind' term='http://schemas.google.com/g/2005#event' />
<title type='text'>Event inserted via batch</title>
</entry>
<entry>
<batch:id>2</batch:id>
<batch:operation type='query' />
<id>http://www.google.com/calendar/feeds/default/private/full/glcs0kv2qqa0gf52qi1jo018gc</id>
<category scheme='http://schemas.google.com/g/2005#kind' term='http://schemas.google.com/g/2005#event' />
<title type='text'>Event queried via batch</title>
</entry>
<entry>
<batch:id>3</batch:id>
<batch:operation type='update' />
<id>http://www.google.com/calendar/feeds/default/private/full/ujm0go5dtngdkr6u91dcqvj0qs</id>
<category scheme='http://schemas.google.com/g/2005#kind' term='http://schemas.google.com/g/2005#event' />
<title type='text'>Event updated via batch</title>
<link rel='alternate' type='text/html'
href='http://www.google.com/calendar/event?eid=dWptMGdvNWR0bmdka3I2dTkxZGNxdmowcXMgaGFyaXNodi50ZXN0QG0' title='alternate' />
<link rel='self' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full/ujm0go5dtngdkr6u91dcqvj0qs' />
<link rel='edit' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full/ujm0go5dtngdkr6u91dcqvj0qs/63326098791' />
</entry>
<entry>
<batch:id>4</batch:id>
<batch:operation type='delete' />
<id>http://www.google.com/calendar/feeds/default/private/full/d8qbg9egk1n6lhsgq1sjbqffqc</id>
<category scheme='http://schemas.google.com/g/2005#kind' term='http://schemas.google.com/g/2005#event' />
<title type='text'>Event deleted via batch</title>
<link rel='alternate' type='text/html'
href='http://www.google.com/calendar/event?eid=ZDhxYmc5ZWdrMW42bGhzZ3Exc2picWZmcWMgaGFyaXNodi50ZXN0QG0' title='alternate' />
<link rel='self' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full/d8qbg9egk1n6lhsgq1sjbqffqc' />
<link rel='edit' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full/d8qbg9egk1n6lhsgq1sjbqffqc/63326018324' />
</entry>
</feed>
"""
CALENDAR_BATCH_RESPONSE = """<?xml version='1.0' encoding='UTF-8'?>
<feed xmlns='http://www.w3.org/2005/Atom'
xmlns:batch='http://schemas.google.com/gdata/batch'
xmlns:gCal='http://schemas.google.com/gCal/2005'>
<id>http://www.google.com/calendar/feeds/default/private/full</id>
<updated>2007-09-21T23:01:00.380Z</updated>
<category scheme='http://schemas.google.com/g/2005#kind' term='http://schemas.google.com/g/2005#event'></category>
<title type='text'>Batch Feed</title>
<link rel='http://schemas.google.com/g/2005#feed' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full' />
<link rel='http://schemas.google.com/g/2005#post' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full' />
<link rel='http://schemas.google.com/g/2005#batch' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full/batch' />
<entry>
<batch:id>1</batch:id>
<batch:status code='201' reason='Created' />
<batch:operation type='insert' />
<id>http://www.google.com/calendar/feeds/default/private/full/n9ug78gd9tv53ppn4hdjvk68ek</id>
<category scheme='http://schemas.google.com/g/2005#kind' term='http://schemas.google.com/g/2005#event' />
<title type='text'>Event inserted via batch</title>
<link rel='alternate' type='text/html'
href='http://www.google.com/calendar/event?eid=bjl1Zzc4Z2Q5dHY1M3BwbjRoZGp2azY4ZWsgaGFyaXNodi50ZXN0QG0' title='alternate' />
<link rel='self' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full/n9ug78gd9tv53ppn4hdjvk68ek' />
<link rel='edit' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full/n9ug78gd9tv53ppn4hdjvk68ek/63326098860' />
</entry>
<entry>
<batch:id>2</batch:id>
<batch:status code='200' reason='Success' />
<batch:operation type='query' />
<id>http://www.google.com/calendar/feeds/default/private/full/glsc0kv2aqa0ff52qi1jo018gc</id>
<category scheme='http://schemas.google.com/g/2005#kind' term='http://schemas.google.com/g/2005#event' />
<title type='text'>Event queried via batch</title>
<link rel='alternate' type='text/html'
href='http://www.google.com/calendar/event?eid=Z2xzYzBrdjJhcWEwZmY1MnFpMWpvMDE4Z2MgaGFyaXNodi50ZXN0QG0' title='alternate' />
<link rel='self' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full/glsc0kv2aqa0ff52qi1jo018gc' />
<link rel='edit' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full/glsc0kv2aqa0ff52qi1jo018gc/63326098791' />
</entry>
<entry xmlns:gCal='http://schemas.google.com/gCal/2005'>
<batch:id>3</batch:id>
<batch:status code='200' reason='Success' />
<batch:operation type='update' />
<id>http://www.google.com/calendar/feeds/default/private/full/ujm0go5dtngdkr6u91dcqvj0qs</id>
<category scheme='http://schemas.google.com/g/2005#kind' term='http://schemas.google.com/g/2005#event' />
<title type='text'>Event updated via batch</title>
<link rel='alternate' type='text/html'
href='http://www.google.com/calendar/event?eid=dWptMGdvNWR0bmdka3I2dTkxZGNxdmowcXMgaGFyaXNodi50ZXN0QG0' title='alternate' />
<link rel='self' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full/ujm0go5dtngdkr6u91dcqvj0qs' />
<link rel='edit' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/full/ujm0go5dtngdkr6u91dcqvj0qs/63326098860' />
</entry>
<entry>
<batch:id>4</batch:id>
<batch:status code='200' reason='Success' />
<batch:operation type='delete' />
<id>http://www.google.com/calendar/feeds/default/private/full/d8qbg9egk1n6lhsgq1sjbqffqc</id>
<category scheme='http://schemas.google.com/g/2005#kind' term='http://schemas.google.com/g/2005#event' />
<title type='text'>Event deleted via batch</title>
<content type='text'>Deleted</content>
</entry>
</feed>
"""
GBASE_ATTRIBUTE_FEED = """<?xml version='1.0' encoding='UTF-8'?>
<feed xmlns='http://www.w3.org/2005/Atom' xmlns:openSearch='http://a9.com/-/spec/opensearchrss/1.0/' xmlns:gm='http://base.google.com/ns-metadata/1.0'>
<id>http://www.google.com/base/feeds/attributes</id>
<updated>2006-11-01T20:35:59.578Z</updated>
<category scheme='http://base.google.com/categories/itemtypes' term='online jobs'></category>
<category scheme='http://base.google.com/categories/itemtypes' term='jobs'></category>
<title type='text'>Attribute histogram for query: [item type:jobs]</title>
<link rel='alternate' type='text/html' href='http://base.google.com'></link>
<link rel='http://schemas.google.com/g/2005#feed' type='application/atom+xml' href='http://www.google.com/base/feeds/attributes'></link>
<link rel='self' type='application/atom+xml' href='http://www.google.com/base/feeds/attributes/-/jobs'></link>
<generator version='1.0' uri='http://base.google.com'>GoogleBase</generator>
<openSearch:totalResults>16</openSearch:totalResults>
<openSearch:startIndex>1</openSearch:startIndex>
<openSearch:itemsPerPage>16</openSearch:itemsPerPage>
<entry>
<id>http://www.google.com/base/feeds/attributes/job+industry%28text%29N%5Bitem+type%3Ajobs%5D</id>
<updated>2006-11-01T20:36:00.100Z</updated>
<title type='text'>job industry(text)</title>
<content type='text'>Attribute "job industry" of type text.
</content>
<link rel='self' type='application/atom+xml' href='http://www.google.com/base/feeds/attributes/job+industry%28text%29N%5Bitem+type%3Ajobs%5D'></link>
<gm:attribute name='job industry' type='text' count='4416629'>
<gm:value count='380772'>it internet</gm:value>
<gm:value count='261565'>healthcare</gm:value>
<gm:value count='142018'>information technology</gm:value>
<gm:value count='124622'>accounting</gm:value>
<gm:value count='111311'>clerical and administrative</gm:value>
<gm:value count='82928'>other</gm:value>
<gm:value count='77620'>sales and sales management</gm:value>
<gm:value count='68764'>information systems</gm:value>
<gm:value count='65859'>engineering and architecture</gm:value>
<gm:value count='64757'>sales</gm:value>
</gm:attribute>
</entry>
</feed>
"""
GBASE_ATTRIBUTE_ENTRY = """<?xml version='1.0' encoding='UTF-8'?>
<entry xmlns='http://www.w3.org/2005/Atom' xmlns:openSearch='http://a9.com/-/spec/opensearchrss/1.0/' xmlns:gm='http://base.google.com/ns-metadata/1.0'>
<id>http://www.google.com/base/feeds/attributes/job+industry%28text%29N%5Bitem+type%3Ajobs%5D</id>
<updated>2006-11-01T20:36:00.100Z</updated>
<title type='text'>job industry(text)</title>
<content type='text'>Attribute "job industry" of type text.
</content>
<link rel='self' type='application/atom+xml' href='http://www.google.com/base/feeds/attributes/job+industry%28text%29N%5Bitem+type%3Ajobs%5D'></link>
<gm:attribute name='job industry' type='text' count='4416629'>
<gm:value count='380772'>it internet</gm:value>
<gm:value count='261565'>healthcare</gm:value>
<gm:value count='142018'>information technology</gm:value>
<gm:value count='124622'>accounting</gm:value>
<gm:value count='111311'>clerical and administrative</gm:value>
<gm:value count='82928'>other</gm:value>
<gm:value count='77620'>sales and sales management</gm:value>
<gm:value count='68764'>information systems</gm:value>
<gm:value count='65859'>engineering and architecture</gm:value>
<gm:value count='64757'>sales</gm:value>
</gm:attribute>
</entry>
"""
GBASE_LOCALES_FEED = """<?xml version='1.0' encoding='UTF-8'?>
<feed xmlns='http://www.w3.org/2005/Atom'
xmlns:openSearch='http://a9.com/-/spec/opensearchrss/1.0/'
xmlns:gm='http://base.google.com/ns-metadata/1.0'>
<id> http://www.google.com/base/feeds/locales/</id>
<updated>2006-06-13T18:11:40.120Z</updated>
<title type="text">Locales</title>
<link rel="alternate" type="text/html" href="http://base.google.com"/>
<link rel="http://schemas.google.com/g/2005#feed" type="application/atom+xml"
href="http://www.google.com/base/feeds/locales/"/>
<link rel="self" type="application/atom+xml" href="http://www.google.com/base/feeds/locales/"/>
<author>
<name>Google Inc.</name>
<email>base@google.com</email>
</author>
<generator version="1.0" uri="http://base.google.com">GoogleBase</generator>
<openSearch:totalResults>3</openSearch:totalResults>
<openSearch:itemsPerPage>25</openSearch:itemsPerPage>
<entry>
<id>http://www.google.com/base/feeds/locales/en_US</id>
<updated>2006-03-27T22:27:36.658Z</updated>
<category scheme="http://base.google.com/categories/locales" term="en_US"/>
<title type="text">en_US</title>
<content type="text">en_US</content>
<link rel="self" type="application/atom+xml"
href="http://www.google.com/base/feeds/locales/en_US"></link>
<link rel="related" type="application/atom+xml"
href="http://www.google.com/base/feeds/itemtypes/en_US" title="Item types in en_US"/>
</entry>
<entry>
<id>http://www.google.com/base/feeds/locales/en_GB</id>
<updated>2006-06-13T18:14:18.601Z</updated>
<category scheme="http://base.google.com/categories/locales" term="en_GB"/>
<title type="text">en_GB</title>
<content type="text">en_GB</content>
<link rel="related" type="application/atom+xml"
href="http://www.google.com/base/feeds/itemtypes/en_GB" title="Item types in en_GB"/>
<link rel="self" type="application/atom+xml"
href="http://www.google.com/base/feeds/locales/en_GB"/>
</entry>
<entry>
<id>http://www.google.com/base/feeds/locales/de_DE</id>
<updated>2006-06-13T18:14:18.601Z</updated>
<category scheme="http://base.google.com/categories/locales" term="de_DE"/>
<title type="text">de_DE</title>
<content type="text">de_DE</content>
<link rel="related" type="application/atom+xml"
href="http://www.google.com/base/feeds/itemtypes/de_DE" title="Item types in de_DE"/>
<link rel="self" type="application/atom+xml"
href="http://www.google.com/base/feeds/locales/de_DE"/>
</entry>
</feed>"""
GBASE_STRING_ENCODING_ENTRY = """<?xml version='1.0' encoding='UTF-8'?>
<entry xmlns='http://www.w3.org/2005/Atom' xmlns:gm='http://base.google.com/ns-metadata/1.0'
xmlns:g='http://base.google.com/ns/1.0' xmlns:batch='http://schemas.google.com/gdata/batch'>
<id>http://www.google.com/base/feeds/snippets/17495780256183230088</id>
<published>2007-12-09T03:13:07.000Z</published>
<updated>2008-01-07T03:26:46.000Z</updated>
<category scheme='http://base.google.com/categories/itemtypes' term='Products'/>
<title type='text'>Digital Camera Cord Fits SONY Cybershot DSC-R1 S40</title>
<content type='html'>SONY \xC2\xB7 Cybershot Digital Camera Usb Cable DESCRIPTION
This is a 2.5 USB 2.0 A to Mini B (5 Pin) high quality digital camera
cable used for connecting your Sony Digital Cameras and Camcoders. Backward
Compatible with USB 2.0, 1.0 and 1.1. Fully ...</content>
<link rel='alternate' type='text/html'
href='http://adfarm.mediaplex.com/ad/ck/711-5256-8196-2?loc=http%3A%2F%2Fcgi.ebay.com%2FDigital-Camera-Cord-Fits-SONY-Cybershot-DSC-R1-S40_W0QQitemZ270195049057QQcmdZViewItem'/>
<link rel='self' type='application/atom+xml'
href='http://www.google.com/base/feeds/snippets/17495780256183230088'/>
<author>
<name>eBay</name>
</author>
<g:item_type type='text'>Products</g:item_type>
<g:item_language type='text'>EN</g:item_language>
<g:target_country type='text'>US</g:target_country>
<g:price type='floatUnit'>0.99 usd</g:price>
<g:image_link type='url'>http://thumbs.ebaystatic.com/pict/270195049057_1.jpg</g:image_link>
<g:category type='text'>Cameras &amp; Photo>Digital Camera Accessories>Cables</g:category>
<g:category type='text'>Cords &amp; Connectors>USB Cables>For Other Brands</g:category>
<g:customer_id type='int'>11729</g:customer_id>
<g:id type='text'>270195049057</g:id>
<g:expiration_date type='dateTime'>2008-02-06T03:26:46Z</g:expiration_date>
</entry>"""
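
# Illustrative sketch only -- not part of the original fixtures. It reads a few
# g: item attributes out of GBASE_STRING_ENCODING_ENTRY above using only the
# standard library; the helper name and dict keys are made up for this example.
import xml.etree.ElementTree as ElementTree


def _gbase_item_attributes(xml_string=GBASE_STRING_ENCODING_ENTRY):
  """Returns a dict of selected Google Base item attributes from the entry."""
  g = '{http://base.google.com/ns/1.0}'
  entry = ElementTree.fromstring(xml_string)
  return {'item_type': entry.findtext(g + 'item_type'),
          'price': entry.findtext(g + 'price'),
          'customer_id': entry.findtext(g + 'customer_id')}
# For the fixture above:
# {'item_type': 'Products', 'price': '0.99 usd', 'customer_id': '11729'}
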
RECURRENCE_EXCEPTION_ENTRY = """<entry xmlns='http://www.w3.org/2005/Atom'
xmlns:openSearch='http://a9.com/-/spec/opensearchrss/1.0/'
xmlns:gd='http://schemas.google.com/g/2005'
xmlns:gCal='http://schemas.google.com/gCal/2005'>
<id>
http://www.google.com/calendar/feeds/default/private/composite/i7lgfj69mjqjgnodklif3vbm7g</id>
<published>2007-04-05T21:51:49.000Z</published>
<updated>2007-04-05T21:51:49.000Z</updated>
<category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/g/2005#event'></category>
<title type='text'>testDavid</title>
<content type='text'></content>
<link rel='alternate' type='text/html'
href='http://www.google.com/calendar/event?eid=aTdsZ2ZqNjltanFqZ25vZGtsaWYzdmJtN2dfMjAwNzA0MDNUMTgwMDAwWiBnZGF0YS5vcHMudGVzdEBt'
title='alternate'></link>
<link rel='self' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/default/private/composite/i7lgfj69mjqjgnodklif3vbm7g'>
</link>
<author>
<name>gdata ops</name>
<email>gdata.ops.test@gmail.com</email>
</author>
<gd:visibility value='http://schemas.google.com/g/2005#event.default'>
</gd:visibility>
<gCal:sendEventNotifications value='true'>
</gCal:sendEventNotifications>
<gd:transparency value='http://schemas.google.com/g/2005#event.opaque'>
</gd:transparency>
<gd:eventStatus value='http://schemas.google.com/g/2005#event.confirmed'>
</gd:eventStatus>
<gd:recurrence>DTSTART;TZID=America/Anchorage:20070403T100000
DTEND;TZID=America/Anchorage:20070403T110000
RRULE:FREQ=DAILY;UNTIL=20070408T180000Z;WKST=SU
EXDATE;TZID=America/Anchorage:20070407T100000
EXDATE;TZID=America/Anchorage:20070405T100000
EXDATE;TZID=America/Anchorage:20070404T100000 BEGIN:VTIMEZONE
TZID:America/Anchorage X-LIC-LOCATION:America/Anchorage
BEGIN:STANDARD TZOFFSETFROM:-0800 TZOFFSETTO:-0900 TZNAME:AKST
DTSTART:19701025T020000 RRULE:FREQ=YEARLY;BYMONTH=10;BYDAY=-1SU
END:STANDARD BEGIN:DAYLIGHT TZOFFSETFROM:-0900 TZOFFSETTO:-0800
TZNAME:AKDT DTSTART:19700405T020000
RRULE:FREQ=YEARLY;BYMONTH=4;BYDAY=1SU END:DAYLIGHT
END:VTIMEZONE</gd:recurrence>
<gd:where valueString=''></gd:where>
<gd:reminder minutes='10'></gd:reminder>
<gd:recurrenceException specialized='true'>
<gd:entryLink>
<entry>
<id>i7lgfj69mjqjgnodklif3vbm7g_20070407T180000Z</id>
<published>2007-04-05T21:51:49.000Z</published>
<updated>2007-04-05T21:52:58.000Z</updated>
<category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/g/2005#event'></category>
<title type='text'>testDavid</title>
<content type='text'></content>
<link rel='alternate' type='text/html'
href='http://www.google.com/calendar/event?eid=aTdsZ2ZqNjltanFqZ25vZGtsaWYzdmJtN2dfMjAwNzA0MDdUMTgwMDAwWiBnZGF0YS5vcHMudGVzdEBt'
title='alternate'></link>
<author>
<name>gdata ops</name>
<email>gdata.ops.test@gmail.com</email>
</author>
<gd:visibility value='http://schemas.google.com/g/2005#event.default'>
</gd:visibility>
<gd:originalEvent id='i7lgfj69mjqjgnodklif3vbm7g'
href='http://www.google.com/calendar/feeds/default/private/composite/i7lgfj69mjqjgnodklif3vbm7g'>
<gd:when startTime='2007-04-07T13:00:00.000-05:00'>
</gd:when>
</gd:originalEvent>
<gCal:sendEventNotifications value='false'>
</gCal:sendEventNotifications>
<gd:transparency value='http://schemas.google.com/g/2005#event.opaque'>
</gd:transparency>
<gd:eventStatus value='http://schemas.google.com/g/2005#event.canceled'>
</gd:eventStatus>
<gd:comments>
<gd:feedLink href='http://www.google.com/calendar/feeds/default/private/full/i7lgfj69mjqjgnodklif3vbm7g_20070407T180000Z/comments'>
<feed>
<updated>2007-04-05T21:54:09.285Z</updated>
<category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/g/2005#message'>
</category>
<title type='text'>Comments for: testDavid</title>
<link rel='alternate' type='text/html'
href='http://www.google.com/calendar/feeds/default/private/full/i7lgfj69mjqjgnodklif3vbm7g_20070407T180000Z/comments'
title='alternate'></link>
</feed>
</gd:feedLink>
</gd:comments>
<gd:when startTime='2007-04-07T13:00:00.000-05:00'
endTime='2007-04-07T14:00:00.000-05:00'>
<gd:reminder minutes='10'></gd:reminder>
</gd:when>
<gd:where valueString=''></gd:where>
</entry>
</gd:entryLink>
</gd:recurrenceException>
</entry>"""
NICK_ENTRY = """<?xml version="1.0" encoding="UTF-8"?>
<atom:entry xmlns:atom="http://www.w3.org/2005/Atom"
xmlns:apps="http://schemas.google.com/apps/2006"
xmlns:gd="http://schemas.google.com/g/2005">
<atom:id>https://www.google.com/a/feeds/example.com/nickname/2.0/Foo</atom:id>
<atom:updated>1970-01-01T00:00:00.000Z</atom:updated>
<atom:category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/apps/2006#nickname'/>
<atom:title type="text">Foo</atom:title>
<atom:link rel="self" type="application/atom+xml"
href="https://www.google.com/a/feeds/example.com/nickname/2.0/Foo"/>
<atom:link rel="edit" type="application/atom+xml"
href="https://www.google.com/a/feeds/example.com/nickname/2.0/Foo"/>
<apps:nickname name="Foo"/>
<apps:login userName="TestUser"/>
</atom:entry>"""
NICK_FEED = """<?xml version="1.0" encoding="UTF-8"?>
<atom:feed xmlns:atom="http://www.w3.org/2005/Atom"
xmlns:openSearch="http://a9.com/-/spec/opensearchrss/1.0/"
xmlns:apps="http://schemas.google.com/apps/2006">
<atom:id>
http://www.google.com/a/feeds/example.com/nickname/2.0
</atom:id>
<atom:updated>1970-01-01T00:00:00.000Z</atom:updated>
<atom:category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/apps/2006#nickname'/>
<atom:title type="text">Nicknames for user SusanJones</atom:title>
<atom:link rel='http://schemas.google.com/g/2005#feed'
type="application/atom+xml"
href="http://www.google.com/a/feeds/example.com/nickname/2.0"/>
<atom:link rel='http://schemas.google.com/g/2005#post'
type="application/atom+xml"
href="http://www.google.com/a/feeds/example.com/nickname/2.0"/>
<atom:link rel="self" type="application/atom+xml"
href="http://www.google.com/a/feeds/example.com/nickname/2.0?username=TestUser"/>
<openSearch:startIndex>1</openSearch:startIndex>
<openSearch:itemsPerPage>2</openSearch:itemsPerPage>
<atom:entry>
<atom:id>
http://www.google.com/a/feeds/example.com/nickname/2.0/Foo
</atom:id>
<atom:category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/apps/2006#nickname'/>
<atom:title type="text">Foo</atom:title>
<atom:link rel="self" type="application/atom+xml"
href="http://www.google.com/a/feeds/example.com/nickname/2.0/Foo"/>
<atom:link rel="edit" type="application/atom+xml"
href="http://www.google.com/a/feeds/example.com/nickname/2.0/Foo"/>
<apps:nickname name="Foo"/>
<apps:login userName="TestUser"/>
</atom:entry>
<atom:entry>
<atom:id>
http://www.google.com/a/feeds/example.com/nickname/2.0/suse
</atom:id>
<atom:category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/apps/2006#nickname'/>
<atom:title type="text">suse</atom:title>
<atom:link rel="self" type="application/atom+xml"
href="http://www.google.com/a/feeds/example.com/nickname/2.0/Bar"/>
<atom:link rel="edit" type="application/atom+xml"
href="http://www.google.com/a/feeds/example.com/nickname/2.0/Bar"/>
<apps:nickname name="Bar"/>
<apps:login userName="TestUser"/>
</atom:entry>
</atom:feed>"""
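
# Illustrative sketch only -- not part of the original fixtures. It pairs each
# nickname in NICK_FEED above with the user name it belongs to; the helper name
# is made up for this example.
import xml.etree.ElementTree as ElementTree


def _nicknames_by_user(xml_string=NICK_FEED):
  """Returns a list of (nickname, userName) pairs from a nickname feed."""
  atom = '{http://www.w3.org/2005/Atom}'
  apps = '{http://schemas.google.com/apps/2006}'
  feed = ElementTree.fromstring(xml_string)
  return [(entry.find(apps + 'nickname').get('name'),
           entry.find(apps + 'login').get('userName'))
          for entry in feed.findall(atom + 'entry')]
# For the fixture above: [('Foo', 'TestUser'), ('Bar', 'TestUser')]
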
USER_ENTRY = """<?xml version="1.0" encoding="UTF-8"?>
<atom:entry xmlns:atom="http://www.w3.org/2005/Atom"
xmlns:apps="http://schemas.google.com/apps/2006"
xmlns:gd="http://schemas.google.com/g/2005">
<atom:id>https://www.google.com/a/feeds/example.com/user/2.0/TestUser</atom:id>
<atom:updated>1970-01-01T00:00:00.000Z</atom:updated>
<atom:category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/apps/2006#user'/>
<atom:title type="text">TestUser</atom:title>
<atom:link rel="self" type="application/atom+xml"
href="https://www.google.com/a/feeds/example.com/user/2.0/TestUser"/>
<atom:link rel="edit" type="application/atom+xml"
href="https://www.google.com/a/feeds/example.com/user/2.0/TestUser"/>
<apps:login userName="TestUser" password="password" suspended="false"
ipWhitelisted='false' hashFunctionName="SHA-1"/>
<apps:name familyName="Test" givenName="User"/>
<apps:quota limit="1024"/>
<gd:feedLink rel='http://schemas.google.com/apps/2006#user.nicknames'
href="https://www.google.com/a/feeds/example.com/nickname/2.0?username=Test-3121"/>
<gd:feedLink rel='http://schemas.google.com/apps/2006#user.emailLists'
href="https://www.google.com/a/feeds/example.com/emailList/2.0?recipient=testlist@example.com"/>
</atom:entry>"""
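
# Illustrative sketch only -- not part of the original fixtures. It reads the
# provisioning attributes out of USER_ENTRY above; the helper name and the
# returned dict keys are made up for this example.
import xml.etree.ElementTree as ElementTree


def _provisioning_user_summary(xml_string=USER_ENTRY):
  """Returns the apps:login and apps:quota attributes as a plain dict."""
  apps = '{http://schemas.google.com/apps/2006}'
  entry = ElementTree.fromstring(xml_string)
  login = entry.find(apps + 'login')
  quota = entry.find(apps + 'quota')
  return {'userName': login.get('userName'),
          'suspended': login.get('suspended') == 'true',
          'quota_limit': int(quota.get('limit'))}
# For the fixture above:
# {'userName': 'TestUser', 'suspended': False, 'quota_limit': 1024}
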
USER_FEED = """<?xml version="1.0" encoding="UTF-8"?>
<atom:feed xmlns:atom="http://www.w3.org/2005/Atom"
xmlns:apps="http://schemas.google.com/apps/2006"
xmlns:openSearch="http://a9.com/-/spec/opensearchrss/1.0/"
xmlns:gd="http://schemas.google.com/g/2005">
<atom:id>
http://www.google.com/a/feeds/example.com/user/2.0
</atom:id>
<atom:updated>1970-01-01T00:00:00.000Z</atom:updated>
<atom:category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/apps/2006#user'/>
<atom:title type="text">Users</atom:title>
<atom:link rel="next" type="application/atom+xml"
href="http://www.google.com/a/feeds/example.com/user/2.0?startUsername=john"/>
<atom:link rel='http://schemas.google.com/g/2005#feed'
type="application/atom+xml"
href="http://www.google.com/a/feeds/example.com/user/2.0"/>
<atom:link rel='http://schemas.google.com/g/2005#post'
type="application/atom+xml"
href="http://www.google.com/a/feeds/example.com/user/2.0"/>
<atom:link rel="self" type="application/atom+xml"
href="http://www.google.com/a/feeds/example.com/user/2.0"/>
<openSearch:startIndex>1</openSearch:startIndex>
<atom:entry>
<atom:id>
http://www.google.com/a/feeds/example.com/user/2.0/TestUser
</atom:id>
<atom:category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/apps/2006#user'/>
<atom:title type="text">TestUser</atom:title>
<atom:link rel="self" type="application/atom+xml"
href="http://www.google.com/a/feeds/example.com/user/2.0/TestUser"/>
<atom:link rel="edit" type="application/atom+xml"
href="http://www.google.com/a/feeds/example.com/user/2.0/TestUser"/>
<gd:who rel='http://schemas.google.com/apps/2006#user.recipient'
email="TestUser@example.com"/>
<apps:login userName="TestUser" suspended="false"/>
<apps:quota limit="2048"/>
<apps:name familyName="Test" givenName="User"/>
<gd:feedLink rel='http://schemas.google.com/apps/2006#user.nicknames'
href="http://www.google.com/a/feeds/example.com/nickname/2.0?username=TestUser"/>
<gd:feedLink rel='http://schemas.google.com/apps/2006#user.emailLists'
href="http://www.google.com/a/feeds/example.com/emailList/2.0?recipient=TestUser@example.com"/>
</atom:entry>
<atom:entry>
<atom:id>
http://www.google.com/a/feeds/example.com/user/2.0/JohnSmith
</atom:id>
<atom:category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/apps/2006#user'/>
<atom:title type="text">JohnSmith</atom:title>
<atom:link rel="self" type="application/atom+xml"
href="http://www.google.com/a/feeds/example.com/user/2.0/JohnSmith"/>
<atom:link rel="edit" type="application/atom+xml"
href="http://www.google.com/a/feeds/example.com/user/2.0/JohnSmith"/>
<gd:who rel='http://schemas.google.com/apps/2006#user.recipient'
email="JohnSmith@example.com"/>
<apps:login userName="JohnSmith" suspended="false"/>
<apps:quota limit="2048"/>
<apps:name familyName="Smith" givenName="John"/>
<gd:feedLink rel='http://schemas.google.com/apps/2006#user.nicknames'
href="http://www.google.com/a/feeds/example.com/nickname/2.0?username=JohnSmith"/>
<gd:feedLink rel='http://schemas.google.com/apps/2006#user.emailLists'
href="http://www.google.com/a/feeds/example.com/emailList/2.0?recipient=JohnSmith@example.com"/>
</atom:entry>
</atom:feed>"""
EMAIL_LIST_ENTRY = """<?xml version="1.0" encoding="UTF-8"?>
<atom:entry xmlns:atom="http://www.w3.org/2005/Atom"
xmlns:apps="http://schemas.google.com/apps/2006"
xmlns:gd="http://schemas.google.com/g/2005">
<atom:id>
https://www.google.com/a/feeds/example.com/emailList/2.0/testlist
</atom:id>
<atom:updated>1970-01-01T00:00:00.000Z</atom:updated>
<atom:category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/apps/2006#emailList'/>
<atom:title type="text">testlist</atom:title>
<atom:link rel="self" type="application/atom+xml"
href="https://www.google.com/a/feeds/example.com/emailList/2.0/testlist"/>
<atom:link rel="edit" type="application/atom+xml"
href="https://www.google.com/a/feeds/example.com/emailList/2.0/testlist"/>
<apps:emailList name="testlist"/>
<gd:feedLink rel='http://schemas.google.com/apps/2006#emailList.recipients'
href="http://www.google.com/a/feeds/example.com/emailList/2.0/testlist/recipient/"/>
</atom:entry>"""
EMAIL_LIST_FEED = """<?xml version="1.0" encoding="UTF-8"?>
<atom:feed xmlns:atom="http://www.w3.org/2005/Atom"
xmlns:openSearch="http://a9.com/-/spec/opensearchrss/1.0/"
xmlns:apps="http://schemas.google.com/apps/2006"
xmlns:gd="http://schemas.google.com/g/2005">
<atom:id>
http://www.google.com/a/feeds/example.com/emailList/2.0
</atom:id>
<atom:updated>1970-01-01T00:00:00.000Z</atom:updated>
<atom:category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/apps/2006#emailList'/>
<atom:title type="text">EmailLists</atom:title>
<atom:link rel="next" type="application/atom+xml"
href="http://www.google.com/a/feeds/example.com/emailList/2.0?startEmailListName=john"/>
<atom:link rel='http://schemas.google.com/g/2005#feed'
type="application/atom+xml"
href="http://www.google.com/a/feeds/example.com/emailList/2.0"/>
<atom:link rel='http://schemas.google.com/g/2005#post'
type="application/atom+xml"
href="http://www.google.com/a/feeds/example.com/emailList/2.0"/>
<atom:link rel="self" type="application/atom+xml"
href="http://www.google.com/a/feeds/example.com/emailList/2.0"/>
<openSearch:startIndex>1</openSearch:startIndex>
<atom:entry>
<atom:id>
http://www.google.com/a/feeds/example.com/emailList/2.0/us-sales
</atom:id>
<atom:updated>1970-01-01T00:00:00.000Z</atom:updated>
<atom:category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/apps/2006#emailList'/>
<atom:title type="text">us-sales</atom:title>
<atom:link rel="self" type="application/atom+xml"
href="http://www.google.com/a/feeds/example.com/emailList/2.0/us-sales"/>
<atom:link rel="edit" type="application/atom+xml"
href="http://www.google.com/a/feeds/example.com/emailList/2.0/us-sales"/>
<apps:emailList name="us-sales"/>
<gd:feedLink rel='http://schemas.google.com/apps/2006#emailList.recipients'
href="http://www.google.com/a/feeds/example.com/emailList/2.0/us-sales/recipient/"/>
</atom:entry>
<atom:entry>
<atom:id>
http://www.google.com/a/feeds/example.com/emailList/2.0/us-eng
</atom:id>
<atom:updated>1970-01-01T00:00:00.000Z</atom:updated>
<atom:category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/apps/2006#emailList'/>
<atom:title type="text">us-eng</atom:title>
<atom:link rel="self" type="application/atom+xml"
href="http://www.google.com/a/feeds/example.com/emailList/2.0/us-eng"/>
<atom:link rel="edit" type="application/atom+xml"
href="http://www.google.com/a/feeds/example.com/emailList/2.0/us-eng"/>
<apps:emailList name="us-eng"/>
<gd:feedLink rel='http://schemas.google.com/apps/2006#emailList.recipients'
href="http://www.google.com/a/feeds/example.com/emailList/2.0/us-eng/recipient/"/>
</atom:entry>
</atom:feed>"""
EMAIL_LIST_RECIPIENT_ENTRY = """<?xml version="1.0" encoding="UTF-8"?>
<atom:entry xmlns:atom="http://www.w3.org/2005/Atom"
xmlns:apps="http://schemas.google.com/apps/2006"
xmlns:gd="http://schemas.google.com/g/2005">
<atom:id>https://www.google.com/a/feeds/example.com/emailList/2.0/us-sales/recipient/TestUser%40example.com</atom:id>
<atom:updated>1970-01-01T00:00:00.000Z</atom:updated>
<atom:category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/apps/2006#emailList.recipient'/>
<atom:title type="text">TestUser</atom:title>
<atom:link rel="self" type="application/atom+xml"
href="https://www.google.com/a/feeds/example.com/emailList/2.0/us-sales/recipient/TestUser%40example.com"/>
<atom:link rel="edit" type="application/atom+xml"
href="https://www.google.com/a/feeds/example.com/emailList/2.0/us-sales/recipient/TestUser%40example.com"/>
<gd:who email="TestUser@example.com"/>
</atom:entry>"""
EMAIL_LIST_RECIPIENT_FEED = """<?xml version="1.0" encoding="UTF-8"?>
<atom:feed xmlns:atom="http://www.w3.org/2005/Atom"
xmlns:openSearch="http://a9.com/-/spec/opensearchrss/1.0/"
xmlns:gd="http://schemas.google.com/g/2005">
<atom:id>
http://www.google.com/a/feeds/example.com/emailList/2.0/us-sales/recipient
</atom:id>
<atom:updated>1970-01-01T00:00:00.000Z</atom:updated>
<atom:category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/apps/2006#emailList.recipient'/>
<atom:title type="text">Recipients for email list us-sales</atom:title>
<atom:link rel="next" type="application/atom+xml"
href="http://www.google.com/a/feeds/example.com/emailList/2.0/us-sales/recipient/?startRecipient=terry@example.com"/>
<atom:link rel='http://schemas.google.com/g/2005#feed'
type="application/atom+xml"
href="http://www.google.com/a/feeds/example.com/emailList/2.0/us-sales/recipient"/>
<atom:link rel='http://schemas.google.com/g/2005#post'
type="application/atom+xml"
href="http://www.google.com/a/feeds/example.com/emailList/2.0/us-sales/recipient"/>
<atom:link rel="self" type="application/atom+xml"
href="http://www.google.com/a/feeds/example.com/emailList/2.0/us-sales/recipient"/>
<openSearch:startIndex>1</openSearch:startIndex>
<atom:entry>
<atom:id>
http://www.google.com/a/feeds/example.com/emailList/2.0/us-sales/recipient/joe%40example.com
</atom:id>
<atom:updated>1970-01-01T00:00:00.000Z</atom:updated>
<atom:category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/apps/2006#emailList.recipient'/>
<atom:title type="text">joe@example.com</atom:title>
<atom:link rel="self" type="application/atom+xml"
href="http://www.google.com/a/feeds/example.com/emailList/2.0/us-sales/recipient/joe%40example.com"/>
<atom:link rel="edit" type="application/atom+xml"
href="http://www.google.com/a/feeds/example.com/emailList/2.0/us-sales/recipient/joe%40example.com"/>
<gd:who email="joe@example.com"/>
</atom:entry>
<atom:entry>
<atom:id>
http://www.google.com/a/feeds/example.com/emailList/2.0/us-sales/recipient/susan%40example.com
</atom:id>
<atom:updated>1970-01-01T00:00:00.000Z</atom:updated>
<atom:category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/apps/2006#emailList.recipient'/>
<atom:title type="text">susan@example.com</atom:title>
<atom:link rel="self" type="application/atom+xml"
href="http://www.google.com/a/feeds/example.com/emailList/2.0/us-sales/recipient/susan%40example.com"/>
<atom:link rel="edit" type="application/atom+xml"
href="http://www.google.com/a/feeds/example.com/emailList/2.0/us-sales/recipient/susan%40example.com"/>
<gd:who email="susan@example.com"/>
</atom:entry>
</atom:feed>"""
ACL_FEED = """<?xml version='1.0' encoding='UTF-8'?>
<feed xmlns='http://www.w3.org/2005/Atom'
xmlns:openSearch='http://a9.com/-/spec/opensearchrss/1.0/'
xmlns:gAcl='http://schemas.google.com/acl/2007'>
<id>http://www.google.com/calendar/feeds/liz%40gmail.com/acl/full</id>
<updated>2007-04-21T00:52:04.000Z</updated>
<title type='text'>Elizabeth Bennet's access control list</title>
<link rel='http://schemas.google.com/acl/2007#controlledObject'
type='application/atom+xml'
href='http://www.google.com/calendar/feeds/liz%40gmail.com/private/full'>
</link>
<link rel='http://schemas.google.com/g/2005#feed'
type='application/atom+xml'
href='http://www.google.com/calendar/feeds/liz%40gmail.com/acl/full'>
</link>
<link rel='http://schemas.google.com/g/2005#post'
type='application/atom+xml'
href='http://www.google.com/calendar/feeds/liz%40gmail.com/acl/full'>
</link>
<link rel='self' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/liz%40gmail.com/acl/full'>
</link>
<generator version='1.0'
uri='http://www.google.com/calendar'>Google Calendar</generator>
<openSearch:totalResults>2</openSearch:totalResults>
<openSearch:startIndex>1</openSearch:startIndex>
<entry>
<id>http://www.google.com/calendar/feeds/liz%40gmail.com/acl/full/user%3Aliz%40gmail.com</id>
<updated>2007-04-21T00:52:04.000Z</updated>
<category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/acl/2007#accessRule'>
</category>
<title type='text'>owner</title>
<content type='text'></content>
<link rel='self' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/liz%40gmail.com/acl/full/user%3Aliz%40gmail.com'>
</link>
<link rel='edit' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/liz%40gmail.com/acl/full/user%3Aliz%40gmail.com'>
</link>
<author>
<name>Elizabeth Bennet</name>
<email>liz@gmail.com</email>
</author>
<gAcl:scope type='user' value='liz@gmail.com'></gAcl:scope>
<gAcl:role value='http://schemas.google.com/gCal/2005#owner'>
</gAcl:role>
</entry>
<entry>
<id>http://www.google.com/calendar/feeds/liz%40gmail.com/acl/full/default</id>
<updated>2007-04-21T00:52:04.000Z</updated>
<category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/acl/2007#accessRule'>
</category>
<title type='text'>read</title>
<content type='text'></content>
<link rel='self' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/liz%40gmail.com/acl/full/default'>
</link>
<link rel='edit' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/liz%40gmail.com/acl/full/default'>
</link>
<author>
<name>Elizabeth Bennet</name>
<email>liz@gmail.com</email>
</author>
<gAcl:scope type='default'></gAcl:scope>
<gAcl:role value='http://schemas.google.com/gCal/2005#read'>
</gAcl:role>
</entry>
</feed>"""
ACL_ENTRY = """<?xml version='1.0' encoding='UTF-8'?>
<entry xmlns='http://www.w3.org/2005/Atom' xmlns:openSearch='http://a9.com/-/spec/opensearchrss/1.0/' xmlns:gd='http://schemas.google.com/g/2005' xmlns:gCal='http://schemas.google.com/gCal/2005' xmlns:gAcl='http://schemas.google.com/acl/2007'>
<id>http://www.google.com/calendar/feeds/liz%40gmail.com/acl/full/user%3Aliz%40gmail.com</id>
<updated>2007-04-21T00:52:04.000Z</updated>
<category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/acl/2007#accessRule'>
</category>
<title type='text'>owner</title>
<content type='text'></content>
<link rel='self' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/liz%40gmail.com/acl/full/user%3Aliz%40gmail.com'>
</link>
<link rel='edit' type='application/atom+xml'
href='http://www.google.com/calendar/feeds/liz%40gmail.com/acl/full/user%3Aliz%40gmail.com'>
</link>
<author>
<name>Elizabeth Bennet</name>
<email>liz@gmail.com</email>
</author>
<gAcl:scope type='user' value='liz@gmail.com'></gAcl:scope>
<gAcl:role value='http://schemas.google.com/gCal/2005#owner'>
</gAcl:role>
</entry>"""
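
# Illustrative sketch only -- not part of the original fixtures. It extracts the
# access-control rule from ACL_ENTRY above; the helper name is made up for this
# example.
import xml.etree.ElementTree as ElementTree


def _acl_rule(xml_string=ACL_ENTRY):
  """Returns (scope type, scope value, role value) from an ACL entry."""
  gacl = '{http://schemas.google.com/acl/2007}'
  entry = ElementTree.fromstring(xml_string)
  scope = entry.find(gacl + 'scope')
  role = entry.find(gacl + 'role')
  return scope.get('type'), scope.get('value'), role.get('value')
# For the fixture above:
# ('user', 'liz@gmail.com', 'http://schemas.google.com/gCal/2005#owner')
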
DOCUMENT_LIST_FEED = """<?xml version='1.0' encoding='UTF-8'?>
<ns0:feed xmlns:ns0="http://www.w3.org/2005/Atom"><ns1:totalResults
xmlns:ns1="http://a9.com/-/spec/opensearchrss/1.0/">2</ns1:totalResults><ns1:startIndex
xmlns:ns1="http://a9.com/-/spec/opensearchrss/1.0/">1</ns1:startIndex><ns0:entry><ns0:content
src="http://foo.com/fm?fmcmd=102&amp;key=supercalifragilisticexpeadocious"
type="text/html"
/><ns0:author><ns0:name>test.user</ns0:name><ns0:email>test.user@gmail.com</ns0:email></ns0:author><ns0:category
label="spreadsheet" scheme="http://schemas.google.com/g/2005#kind"
term="http://schemas.google.com/docs/2007#spreadsheet"
/><ns0:id>http://docs.google.com/feeds/documents/private/full/spreadsheet%3Asupercalifragilisticexpeadocious</ns0:id><ns0:link
href="http://foo.com/ccc?key=supercalifragilisticexpeadocious" rel="alternate"
type="text/html" /><ns0:link
href="http://foo.com/feeds/worksheets/supercalifragilisticexpeadocious/private/full"
rel="http://schemas.google.com/spreadsheets/2006#worksheetsfeed"
type="application/atom+xml" /><ns0:link
href="http://docs.google.com/feeds/documents/private/full/spreadsheet%3Asupercalifragilisticexpeadocious"
rel="self" type="application/atom+xml" /><ns0:title type="text">Test Spreadsheet</ns0:title><ns0:updated>2007-07-03T18:03:32.045Z</ns0:updated></ns0:entry><ns0:entry><ns0:content
src="http://docs.google.com/RawDocContents?action=fetch&amp;docID=gr00vy"
type="text/html"
/><ns0:author><ns0:name>test.user</ns0:name><ns0:email>test.user@gmail.com</ns0:email></ns0:author><ns0:category
label="document" scheme="http://schemas.google.com/g/2005#kind"
term="http://schemas.google.com/docs/2007#document"
/><ns0:id>http://docs.google.com/feeds/documents/private/full/document%3Agr00vy</ns0:id><ns0:link
href="http://foobar.com/Doc?id=gr00vy" rel="alternate" type="text/html"
/><ns0:link
href="http://docs.google.com/feeds/documents/private/full/document%3Agr00vy"
rel="self" type="application/atom+xml" /><ns0:title type="text">Test Document</ns0:title><ns0:updated>2007-07-03T18:02:50.338Z</ns0:updated></ns0:entry><ns0:id>http://docs.google.com/feeds/documents/private/full</ns0:id><ns0:link
href="http://docs.google.com" rel="alternate" type="text/html" /><ns0:link
href="http://docs.google.com/feeds/documents/private/full"
rel="http://schemas.google.com/g/2005#feed" type="application/atom+xml"
/><ns0:link href="http://docs.google.com/feeds/documents/private/full"
rel="http://schemas.google.com/g/2005#post" type="application/atom+xml"
/><ns0:link href="http://docs.google.com/feeds/documents/private/full"
rel="self" type="application/atom+xml" /><ns0:title type="text">Available
Documents -
test.user@gmail.com</ns0:title><ns0:updated>2007-07-09T23:07:21.898Z</ns0:updated></ns0:feed>
"""
DOCUMENT_LIST_ENTRY = """<?xml version='1.0' encoding='UTF-8'?>
<ns0:entry xmlns:ns0="http://www.w3.org/2005/Atom"><ns0:content
src="http://foo.com/fm?fmcmd=102&amp;key=supercalifragilisticexpealidocious"
type="text/html"
/><ns0:author><ns0:name>test.user</ns0:name><ns0:email>test.user@gmail.com</ns0:email></ns0:author><ns0:category
label="spreadsheet" scheme="http://schemas.google.com/g/2005#kind"
term="http://schemas.google.com/docs/2007#spreadsheet"
/><ns0:id>http://docs.google.com/feeds/documents/private/full/spreadsheet%3Asupercalifragilisticexpealidocious</ns0:id><ns0:link
href="http://foo.com/ccc?key=supercalifragilisticexpealidocious"
rel="alternate" type="text/html" /><ns0:link
href="http://foo.com/feeds/worksheets/supercalifragilisticexpealidocious/private/full"
rel="http://schemas.google.com/spreadsheets/2006#worksheetsfeed"
type="application/atom+xml" /><ns0:link
href="http://docs.google.com/feeds/documents/private/full/spreadsheet%3Asupercalifragilisticexpealidocious"
rel="self" type="application/atom+xml" /><ns0:title type="text">Test Spreadsheet</ns0:title><ns0:updated>2007-07-03T18:03:32.045Z</ns0:updated></ns0:entry>
"""
BATCH_ENTRY = """<?xml version='1.0' encoding='UTF-8'?>
<entry xmlns="http://www.w3.org/2005/Atom"
xmlns:batch="http://schemas.google.com/gdata/batch"
xmlns:g="http://base.google.com/ns/1.0">
<id>http://www.google.com/base/feeds/items/2173859253842813008</id>
<published>2006-07-11T14:51:43.560Z</published>
<updated>2006-07-11T14:51:43.560Z</updated>
<title type="text">title</title>
<content type="html">content</content>
<link rel="self"
type="application/atom+xml"
href="http://www.google.com/base/feeds/items/2173859253842813008"/>
<link rel="edit"
type="application/atom+xml"
href="http://www.google.com/base/feeds/items/2173859253842813008"/>
<g:item_type>recipes</g:item_type>
<batch:operation type="insert"/>
<batch:id>itemB</batch:id>
<batch:status code="201" reason="Created"/>
</entry>"""
BATCH_FEED_REQUEST = """<?xml version="1.0" encoding="UTF-8"?>
<feed
xmlns="http://www.w3.org/2005/Atom"
xmlns:openSearch="http://a9.com/-/spec/opensearchrss/1.0/"
xmlns:g="http://base.google.com/ns/1.0"
xmlns:batch="http://schemas.google.com/gdata/batch">
<title type="text">My Batch Feed</title>
<entry>
<id>http://www.google.com/base/feeds/items/13308004346459454600</id>
<batch:operation type="delete"/>
</entry>
<entry>
<id>http://www.google.com/base/feeds/items/17437536661927313949</id>
<batch:operation type="delete"/>
</entry>
<entry>
<title type="text">...</title>
<content type="html">...</content>
<batch:id>itemA</batch:id>
<batch:operation type="insert"/>
<g:item_type>recipes</g:item_type>
</entry>
<entry>
<title type="text">...</title>
<content type="html">...</content>
<batch:id>itemB</batch:id>
<batch:operation type="insert"/>
<g:item_type>recipes</g:item_type>
</entry>
</feed>"""
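
# Illustrative sketch only -- not part of the original fixtures. It lists the
# operation requested for each entry of BATCH_FEED_REQUEST above; delete entries
# carry no batch:id, so None is reported for them. The helper name is made up.
import xml.etree.ElementTree as ElementTree


def _batch_request_operations(xml_string=BATCH_FEED_REQUEST):
  """Returns [(batch id or None, operation type), ...] per request entry."""
  atom = '{http://www.w3.org/2005/Atom}'
  batch = '{http://schemas.google.com/gdata/batch}'
  feed = ElementTree.fromstring(xml_string)
  return [(entry.findtext(batch + 'id'),
           entry.find(batch + 'operation').get('type'))
          for entry in feed.findall(atom + 'entry')]
# For the fixture above:
# [(None, 'delete'), (None, 'delete'), ('itemA', 'insert'), ('itemB', 'insert')]
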
BATCH_FEED_RESULT = """<?xml version="1.0" encoding="UTF-8"?>
<feed
xmlns="http://www.w3.org/2005/Atom"
xmlns:openSearch="http://a9.com/-/spec/opensearchrss/1.0/"
xmlns:g="http://base.google.com/ns/1.0"
xmlns:batch="http://schemas.google.com/gdata/batch">
<id>http://www.google.com/base/feeds/items</id>
<updated>2006-07-11T14:51:42.894Z</updated>
<title type="text">My Batch</title>
<link rel="http://schemas.google.com/g/2005#feed"
type="application/atom+xml"
href="http://www.google.com/base/feeds/items"/>
<link rel="http://schemas.google.com/g/2005#post"
type="application/atom+xml"
href="http://www.google.com/base/feeds/items"/>
<link rel="http://schemas.google.com/g/2005#batch"
type="application/atom+xml"
href="http://www.google.com/base/feeds/items/batch"/>
<entry>
<id>http://www.google.com/base/feeds/items/2173859253842813008</id>
<published>2006-07-11T14:51:43.560Z</published>
<updated>2006-07-11T14:51:43.560Z</updated>
<title type="text">...</title>
<content type="html">...</content>
<link rel="self"
type="application/atom+xml"
href="http://www.google.com/base/feeds/items/2173859253842813008"/>
<link rel="edit"
type="application/atom+xml"
href="http://www.google.com/base/feeds/items/2173859253842813008"/>
<g:item_type>recipes</g:item_type>
<batch:operation type="insert"/>
<batch:id>itemB</batch:id>
<batch:status code="201" reason="Created"/>
</entry>
<entry>
<id>http://www.google.com/base/feeds/items/11974645606383737963</id>
<published>2006-07-11T14:51:43.247Z</published>
<updated>2006-07-11T14:51:43.247Z</updated>
<title type="text">...</title>
<content type="html">...</content>
<link rel="self"
type="application/atom+xml"
href="http://www.google.com/base/feeds/items/11974645606383737963"/>
<link rel="edit"
type="application/atom+xml"
href="http://www.google.com/base/feeds/items/11974645606383737963"/>
<g:item_type>recipes</g:item_type>
<batch:operation type="insert"/>
<batch:id>itemA</batch:id>
<batch:status code="201" reason="Created"/>
</entry>
<entry>
<id>http://www.google.com/base/feeds/items/13308004346459454600</id>
<updated>2006-07-11T14:51:42.894Z</updated>
<title type="text">Error</title>
<content type="text">Bad request</content>
<batch:status code="404"
reason="Bad request"
content-type="application/xml">
<errors>
<error type="request" reason="Cannot find item"/>
</errors>
</batch:status>
</entry>
<entry>
<id>http://www.google.com/base/feeds/items/17437536661927313949</id>
<updated>2006-07-11T14:51:43.246Z</updated>
<content type="text">Deleted</content>
<batch:operation type="delete"/>
<batch:status code="200" reason="Success"/>
</entry>
</feed>"""
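
# Illustrative sketch only -- not part of the original fixtures. It collects the
# per-entry batch status codes from BATCH_FEED_RESULT above; entries without a
# batch:id (the deletes and the error) report None. The helper name is made up.
import xml.etree.ElementTree as ElementTree


def _batch_result_statuses(xml_string=BATCH_FEED_RESULT):
  """Returns [(batch id or None, status code, reason), ...] per result entry."""
  atom = '{http://www.w3.org/2005/Atom}'
  batch = '{http://schemas.google.com/gdata/batch}'
  feed = ElementTree.fromstring(xml_string)
  results = []
  for entry in feed.findall(atom + 'entry'):
    status = entry.find(batch + 'status')
    results.append((entry.findtext(batch + 'id'),
                    status.get('code'), status.get('reason')))
  return results
# For the fixture above the third tuple is (None, '404', 'Bad request').
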
ALBUM_FEED = """<?xml version="1.0" encoding="UTF-8"?>
<feed xmlns="http://www.w3.org/2005/Atom" xmlns:openSearch="http://a9.com/-/spec/opensearchrss/1.0/" xmlns:exif="http://schemas.google.com/photos/exif/2007" xmlns:geo="http://www.w3.org/2003/01/geo/wgs84_pos#" xmlns:gml="http://www.opengis.net/gml" xmlns:georss="http://www.georss.org/georss" xmlns:photo="http://www.pheed.com/pheed/" xmlns:media="http://search.yahoo.com/mrss/" xmlns:batch="http://schemas.google.com/gdata/batch" xmlns:gphoto="http://schemas.google.com/photos/2007">
<id>http://picasaweb.google.com/data/feed/api/user/sample.user/albumid/1</id>
<updated>2007-09-21T18:23:05.000Z</updated>
<category scheme="http://schemas.google.com/g/2005#kind" term="http://schemas.google.com/photos/2007#album"/>
<title type="text">Test</title>
<subtitle type="text"/>
<rights type="text">public</rights>
<icon>http://lh6.google.com/sample.user/Rt8WNoDZEJE/AAAAAAAAABk/HQGlDhpIgWo/s160-c/Test.jpg</icon>
<link rel="http://schemas.google.com/g/2005#feed" type="application/atom+xml" href="http://picasaweb.google.com/data/feed/api/user/sample.user/albumid/1"/>
<link rel="alternate" type="text/html" href="http://picasaweb.google.com/sample.user/Test"/>
<link rel="http://schemas.google.com/photos/2007#slideshow" type="application/x-shockwave-flash" href="http://picasaweb.google.com/s/c/bin/slideshow.swf?host=picasaweb.google.com&RGB=0x000000&feed=http%3A%2F%2Fpicasaweb.google.com%2Fdata%2Ffeed%2Fapi%2Fuser%2Fsample.user%2Falbumid%2F1%3Falt%3Drss"/>
<link rel="self" type="application/atom+xml" href="http://picasaweb.google.com/data/feed/api/user/sample.user/albumid/1?start-index=1&max-results=500&kind=photo%2Ctag"/>
<author>
<name>sample</name>
<uri>http://picasaweb.google.com/sample.user</uri>
</author>
<generator version="1.00" uri="http://picasaweb.google.com/">Picasaweb</generator> <openSearch:totalResults>4</openSearch:totalResults>
<openSearch:startIndex>1</openSearch:startIndex>
<openSearch:itemsPerPage>500</openSearch:itemsPerPage>
<gphoto:id>1</gphoto:id>
<gphoto:name>Test</gphoto:name>
<gphoto:location/>
<gphoto:access>public</gphoto:access>
<gphoto:timestamp>1188975600000</gphoto:timestamp>
<gphoto:numphotos>2</gphoto:numphotos>
<gphoto:user>sample.user</gphoto:user>
<gphoto:nickname>sample</gphoto:nickname>
<gphoto:commentingEnabled>true</gphoto:commentingEnabled>
<gphoto:commentCount>0</gphoto:commentCount>
<entry>
<id>http://picasaweb.google.com/data/entry/api/user/sample.user/albumid/1/photoid/2</id>
<published>2007-09-05T20:49:23.000Z</published>
<updated>2007-09-21T18:23:05.000Z</updated>
<category scheme="http://schemas.google.com/g/2005#kind" term="http://schemas.google.com/photos/2007#photo"/>
<title type="text">Aqua Blue.jpg</title>
<summary type="text">Blue</summary>
<content type="image/jpeg" src="http://lh4.google.com/sample.user/Rt8WU4DZEKI/AAAAAAAAABY/IVgLqmnzJII/Aqua%20Blue.jpg"/> <link rel="http://schemas.google.com/g/2005#feed" type="application/atom+xml" href="http://picasaweb.google.com/data/feed/api/user/sample.user/albumid/1/photoid/2"/>
<link rel="alternate" type="text/html" href="http://picasaweb.google.com/sample.user/Test/photo#2"/>
<link rel="self" type="application/atom+xml" href="http://picasaweb.google.com/data/entry/api/user/sample.user/albumid/1/photoid/2"/>
<gphoto:id>2</gphoto:id>
<gphoto:version>1190398985145172</gphoto:version>
<gphoto:position>0.0</gphoto:position>
<gphoto:albumid>1</gphoto:albumid>
<gphoto:width>2560</gphoto:width>
<gphoto:height>1600</gphoto:height>
<gphoto:size>883405</gphoto:size>
<gphoto:client/>
<gphoto:checksum/>
<gphoto:timestamp>1189025362000</gphoto:timestamp>
<exif:tags>
<exif:flash>true</exif:flash>
<exif:imageUniqueID>c041ce17aaa637eb656c81d9cf526c24</exif:imageUniqueID>
</exif:tags>
<gphoto:commentingEnabled>true</gphoto:commentingEnabled>
<gphoto:commentCount>1</gphoto:commentCount>
<media:group>
<media:title type="plain">Aqua Blue.jpg</media:title> <media:description type="plain">Blue</media:description>
<media:keywords>tag, test</media:keywords>
<media:content url="http://lh4.google.com/sample.user/Rt8WU4DZEKI/AAAAAAAAABY/IVgLqmnzJII/Aqua%20Blue.jpg" height="1600" width="2560" type="image/jpeg" medium="image"/>
<media:thumbnail url="http://lh4.google.com/sample.user/Rt8WU4DZEKI/AAAAAAAAABY/IVgLqmnzJII/s72/Aqua%20Blue.jpg" height="45" width="72"/>
<media:thumbnail url="http://lh4.google.com/sample.user/Rt8WU4DZEKI/AAAAAAAAABY/IVgLqmnzJII/s144/Aqua%20Blue.jpg" height="90" width="144"/>
<media:thumbnail url="http://lh4.google.com/sample.user/Rt8WU4DZEKI/AAAAAAAAABY/IVgLqmnzJII/s288/Aqua%20Blue.jpg" height="180" width="288"/>
<media:credit>sample</media:credit>
</media:group>
</entry>
<entry>
<id>http://picasaweb.google.com/data/entry/api/user/sample.user/albumid/1/photoid/3</id>
<published>2007-09-05T20:49:24.000Z</published>
<updated>2007-09-21T18:19:38.000Z</updated>
<category scheme="http://schemas.google.com/g/2005#kind" term="http://schemas.google.com/photos/2007#photo"/>
<title type="text">Aqua Graphite.jpg</title>
<summary type="text">Gray</summary>
<content type="image/jpeg" src="http://lh5.google.com/sample.user/Rt8WVIDZELI/AAAAAAAAABg/d7e0i7gvhNU/Aqua%20Graphite.jpg"/>
<link rel="http://schemas.google.com/g/2005#feed" type="application/atom+xml" href="http://picasaweb.google.com/data/feed/api/user/sample.user/albumid/1/photoid/3"/>
<link rel="alternate" type="text/html" href="http://picasaweb.google.com/sample.user/Test/photo#3"/>
<link rel="self" type="application/atom+xml" href="http://picasaweb.google.com/data/entry/api/user/sample.user/albumid/1/photoid/3"/>
<gphoto:id>3</gphoto:id>
<gphoto:version>1190398778006402</gphoto:version>
<gphoto:position>1.0</gphoto:position>
<gphoto:albumid>1</gphoto:albumid>
<gphoto:width>2560</gphoto:width>
<gphoto:height>1600</gphoto:height>
<gphoto:size>798334</gphoto:size>
<gphoto:client/>
<gphoto:checksum/>
<gphoto:timestamp>1189025363000</gphoto:timestamp>
<exif:tags>
<exif:flash>true</exif:flash>
<exif:imageUniqueID>a5ce2e36b9df7d3cb081511c72e73926</exif:imageUniqueID>
</exif:tags>
<gphoto:commentingEnabled>true</gphoto:commentingEnabled>
<gphoto:commentCount>0</gphoto:commentCount>
<media:group>
<media:title type="plain">Aqua Graphite.jpg</media:title>
<media:description type="plain">Gray</media:description>
<media:keywords/>
<media:content url="http://lh5.google.com/sample.user/Rt8WVIDZELI/AAAAAAAAABg/d7e0i7gvhNU/Aqua%20Graphite.jpg" height="1600" width="2560" type="image/jpeg" medium="image"/>
<media:thumbnail url="http://lh5.google.com/sample.user/Rt8WVIDZELI/AAAAAAAAABg/d7e0i7gvhNU/s72/Aqua%20Graphite.jpg" height="45" width="72"/>
<media:thumbnail url="http://lh5.google.com/sample.user/Rt8WVIDZELI/AAAAAAAAABg/d7e0i7gvhNU/s144/Aqua%20Graphite.jpg" height="90" width="144"/>
<media:thumbnail url="http://lh5.google.com/sample.user/Rt8WVIDZELI/AAAAAAAAABg/d7e0i7gvhNU/s288/Aqua%20Graphite.jpg" height="180" width="288"/>
<media:credit>sample</media:credit>
</media:group>
</entry>
<entry>
<id>http://picasaweb.google.com/data/entry/api/user/sample.user/albumid/1/tag/tag</id>
<updated>2007-09-05T20:49:24.000Z</updated>
<category scheme="http://schemas.google.com/g/2005#kind" term="http://schemas.google.com/photos/2007#tag"/>
<title type="text">tag</title>
<summary type="text">tag</summary>
<link rel="alternate" type="text/html" href="http://picasaweb.google.com/lh/searchbrowse?q=tag&psc=G&uname=sample.user&filter=0"/>
<link rel="self" type="application/atom+xml" href="http://picasaweb.google.com/data/entry/api/user/sample.user/albumid/1/tag/tag"/>
<author>
<name>sample</name>
<uri>http://picasaweb.google.com/sample.user</uri>
</author>
</entry>
<entry>
<id>http://picasaweb.google.com/data/entry/api/user/sample.user/albumid/1/tag/test</id>
<updated>2007-09-05T20:49:24.000Z</updated>
<category scheme="http://schemas.google.com/g/2005#kind" term="http://schemas.google.com/photos/2007#tag"/>
<title type="text">test</title>
<summary type="text">test</summary>
<link rel="alternate" type="text/html" href="http://picasaweb.google.com/lh/searchbrowse?q=test&psc=G&uname=sample.user&filter=0"/>
<link rel="self" type="application/atom+xml" href="http://picasaweb.google.com/data/entry/api/user/sample.user/albumid/1/tag/test"/>
<author>
<name>sample</name>
<uri>http://picasaweb.google.com/sample.user</uri>
</author>
</entry>
</feed>"""
CODE_SEARCH_FEED = """<?xml version="1.0" encoding="UTF-8"?>
<feed xmlns="http://www.w3.org/2005/Atom" xmlns:opensearch="http://a9.com/-/spec/opensearchrss/1.0/" xmlns:gcs="http://schemas.google.com/codesearch/2006" xml:base="http://www.google.com">
<id>http://www.google.com/codesearch/feeds/search?q=malloc</id>
<updated>2007-12-19T16:08:04Z</updated>
<title type="text">Google Code Search</title>
<generator version="1.0" uri="http://www.google.com/codesearch">Google Code Search</generator>
<opensearch:totalResults>2530000</opensearch:totalResults>
<opensearch:startIndex>1</opensearch:startIndex>
<author>
<name>Google Code Search</name>
<uri>http://www.google.com/codesearch</uri>
</author>
<link rel="http://schemas.google.com/g/2006#feed" type="application/atom+xml" href="http://schemas.google.com/codesearch/2006"/>
<link rel="self" type="application/atom+xml" href="http://www.google.com/codesearch/feeds/search?q=malloc"/>
<link rel="next" type="application/atom+xml" href="http://www.google.com/codesearch/feeds/search?q=malloc&start-index=11"/>
<link rel="alternate" type="text/html" href="http://www.google.com/codesearch?q=malloc"/>
<entry><id>http://www.google.com/codesearch?hl=en&q=+malloc+show:LDjwp-Iqc7U:84hEYaYsZk8:xDGReDhvNi0&sa=N&ct=rx&cd=1&cs_p=http://www.gnu.org&cs_f=software/autoconf/manual/autoconf-2.60/autoconf.html-002&cs_p=http://www.gnu.org&cs_f=software/autoconf/manual/autoconf-2.60/autoconf.html-002#first</id><updated>2007-12-19T16:08:04Z</updated><author><name>Code owned by external author.</name></author><title type="text">software/autoconf/manual/autoconf-2.60/autoconf.html</title><link rel="alternate" type="text/html" href="http://www.google.com/codesearch?hl=en&q=+malloc+show:LDjwp-Iqc7U:84hEYaYsZk8:xDGReDhvNi0&sa=N&ct=rx&cd=1&cs_p=http://www.gnu.org&cs_f=software/autoconf/manual/autoconf-2.60/autoconf.html-002&cs_p=http://www.gnu.org&cs_f=software/autoconf/manual/autoconf-2.60/autoconf.html-002#first"/><gcs:package name="http://www.gnu.org" uri="http://www.gnu.org"></gcs:package><gcs:file name="software/autoconf/manual/autoconf-2.60/autoconf.html-002"></gcs:file><content type="text/html"><pre> 8: void *<b>malloc</b> ();
</pre></content><gcs:match lineNumber="4" type="text/html"><pre> #undef <b>malloc</b>
</pre></gcs:match><gcs:match lineNumber="8" type="text/html"><pre> void *<b>malloc</b> ();
</pre></gcs:match><gcs:match lineNumber="14" type="text/html"><pre> rpl_<b>malloc</b> (size_t n)
</pre></gcs:match><gcs:match lineNumber="18" type="text/html"><pre> return <b>malloc</b> (n);
</pre></gcs:match></entry>
<entry><id>http://www.google.com/codesearch?hl=en&q=+malloc+show:h4hfh-fV-jI:niBq_bwWZNs:H0OhClf0HWQ&sa=N&ct=rx&cd=2&cs_p=ftp://ftp.gnu.org/gnu/guile/guile-1.6.8.tar.gz&cs_f=guile-1.6.8/libguile/mallocs.c&cs_p=ftp://ftp.gnu.org/gnu/guile/guile-1.6.8.tar.gz&cs_f=guile-1.6.8/libguile/mallocs.c#first</id><updated>2007-12-19T16:08:04Z</updated><author><name>Code owned by external author.</name></author><title type="text">guile-1.6.8/libguile/mallocs.c</title><link rel="alternate" type="text/html" href="http://www.google.com/codesearch?hl=en&q=+malloc+show:h4hfh-fV-jI:niBq_bwWZNs:H0OhClf0HWQ&sa=N&ct=rx&cd=2&cs_p=ftp://ftp.gnu.org/gnu/guile/guile-1.6.8.tar.gz&cs_f=guile-1.6.8/libguile/mallocs.c&cs_p=ftp://ftp.gnu.org/gnu/guile/guile-1.6.8.tar.gz&cs_f=guile-1.6.8/libguile/mallocs.c#first"/><gcs:package name="ftp://ftp.gnu.org/gnu/guile/guile-1.6.8.tar.gz" uri="ftp://ftp.gnu.org/gnu/guile/guile-1.6.8.tar.gz"></gcs:package><gcs:file name="guile-1.6.8/libguile/mallocs.c"></gcs:file><content type="text/html"><pre> 86: {
scm_t_bits mem = n ? (scm_t_bits) <b>malloc</b> (n) : 0;
if (n &amp;&amp; !mem)
</pre></content><gcs:match lineNumber="54" type="text/html"><pre>#include &lt;<b>malloc</b>.h&gt;
</pre></gcs:match><gcs:match lineNumber="62" type="text/html"><pre>scm_t_bits scm_tc16_<b>malloc</b>;
</pre></gcs:match><gcs:match lineNumber="66" type="text/html"><pre><b>malloc</b>_free (SCM ptr)
</pre></gcs:match><gcs:match lineNumber="75" type="text/html"><pre><b>malloc</b>_print (SCM exp, SCM port, scm_print_state *pstate SCM_UNUSED)
</pre></gcs:match><gcs:match lineNumber="77" type="text/html"><pre> scm_puts(&quot;#&lt;<b>malloc</b> &quot;, port);
</pre></gcs:match><gcs:match lineNumber="87" type="text/html"><pre> scm_t_bits mem = n ? (scm_t_bits) <b>malloc</b> (n) : 0;
</pre></gcs:match><gcs:match lineNumber="90" type="text/html"><pre> SCM_RETURN_NEWSMOB (scm_tc16_<b>malloc</b>, mem);
</pre></gcs:match><gcs:match lineNumber="98" type="text/html"><pre> scm_tc16_<b>malloc</b> = scm_make_smob_type (&quot;<b>malloc</b>&quot;, 0);
</pre></gcs:match><gcs:match lineNumber="99" type="text/html"><pre> scm_set_smob_free (scm_tc16_<b>malloc</b>, <b>malloc</b>_free);
</pre></gcs:match><rights>GPL</rights></entry>
<entry><id>http://www.google.com/codesearch?hl=en&q=+malloc+show:9wyZUG-N_30:7_dFxoC1ZrY:C0_iYbFj90M&sa=N&ct=rx&cd=3&cs_p=http://ftp.gnu.org/gnu/bash/bash-3.0.tar.gz&cs_f=bash-3.0/lib/malloc/alloca.c&cs_p=http://ftp.gnu.org/gnu/bash/bash-3.0.tar.gz&cs_f=bash-3.0/lib/malloc/alloca.c#first</id><updated>2007-12-19T16:08:04Z</updated><author><name>Code owned by external author.</name></author><title type="text">bash-3.0/lib/malloc/alloca.c</title><link rel="alternate" type="text/html" href="http://www.google.com/codesearch?hl=en&q=+malloc+show:9wyZUG-N_30:7_dFxoC1ZrY:C0_iYbFj90M&sa=N&ct=rx&cd=3&cs_p=http://ftp.gnu.org/gnu/bash/bash-3.0.tar.gz&cs_f=bash-3.0/lib/malloc/alloca.c&cs_p=http://ftp.gnu.org/gnu/bash/bash-3.0.tar.gz&cs_f=bash-3.0/lib/malloc/alloca.c#first"/><gcs:package name="http://ftp.gnu.org/gnu/bash/bash-3.0.tar.gz" uri="http://ftp.gnu.org/gnu/bash/bash-3.0.tar.gz"></gcs:package><gcs:file name="bash-3.0/lib/malloc/alloca.c"></gcs:file><content type="text/html"><pre> 78: #ifndef emacs
#define <b>malloc</b> x<b>malloc</b>
extern pointer x<b>malloc</b> ();
</pre></content><gcs:match lineNumber="69" type="text/html"><pre> <b>malloc</b>. The Emacs executable needs alloca to call x<b>malloc</b>, because
</pre></gcs:match><gcs:match lineNumber="70" type="text/html"><pre> ordinary <b>malloc</b> isn&#39;t protected from input signals. On the other
</pre></gcs:match><gcs:match lineNumber="71" type="text/html"><pre> hand, the utilities in lib-src need alloca to call <b>malloc</b>; some of
</pre></gcs:match><gcs:match lineNumber="72" type="text/html"><pre> them are very simple, and don&#39;t have an x<b>malloc</b> routine.
</pre></gcs:match><gcs:match lineNumber="76" type="text/html"><pre> Callers below should use <b>malloc</b>. */
</pre></gcs:match><gcs:match lineNumber="79" type="text/html"><pre>#define <b>malloc</b> x<b>malloc</b>
</pre></gcs:match><gcs:match lineNumber="80" type="text/html"><pre>extern pointer x<b>malloc</b> ();
</pre></gcs:match><gcs:match lineNumber="132" type="text/html"><pre> It is very important that sizeof(header) agree with <b>malloc</b>
</pre></gcs:match><gcs:match lineNumber="198" type="text/html"><pre> register pointer new = <b>malloc</b> (sizeof (header) + size);
</pre></gcs:match><rights>GPL</rights></entry>
<entry><id>http://www.google.com/codesearch?hl=en&q=+malloc+show:uhVCKyPcT6k:8juMxxzmUJw:H7_IDsTB2L4&sa=N&ct=rx&cd=4&cs_p=http://ftp.mozilla.org/pub/mozilla.org/mozilla/releases/mozilla1.7b/src/mozilla-source-1.7b-source.tar.bz2&cs_f=mozilla/xpcom/build/malloc.c&cs_p=http://ftp.mozilla.org/pub/mozilla.org/mozilla/releases/mozilla1.7b/src/mozilla-source-1.7b-source.tar.bz2&cs_f=mozilla/xpcom/build/malloc.c#first</id><updated>2007-12-19T16:08:04Z</updated><author><name>Code owned by external author.</name></author><title type="text">mozilla/xpcom/build/malloc.c</title><link rel="alternate" type="text/html" href="http://www.google.com/codesearch?hl=en&q=+malloc+show:uhVCKyPcT6k:8juMxxzmUJw:H7_IDsTB2L4&sa=N&ct=rx&cd=4&cs_p=http://ftp.mozilla.org/pub/mozilla.org/mozilla/releases/mozilla1.7b/src/mozilla-source-1.7b-source.tar.bz2&cs_f=mozilla/xpcom/build/malloc.c&cs_p=http://ftp.mozilla.org/pub/mozilla.org/mozilla/releases/mozilla1.7b/src/mozilla-source-1.7b-source.tar.bz2&cs_f=mozilla/xpcom/build/malloc.c#first"/><gcs:package name="http://ftp.mozilla.org/pub/mozilla.org/mozilla/releases/mozilla1.7b/src/mozilla-source-1.7b-source.tar.bz2" uri="http://ftp.mozilla.org/pub/mozilla.org/mozilla/releases/mozilla1.7b/src/mozilla-source-1.7b-source.tar.bz2"></gcs:package><gcs:file name="mozilla/xpcom/build/malloc.c"></gcs:file><content type="text/html"><pre> 54: http://gee.cs.oswego.edu/dl/html/<b>malloc</b>.html
You may already by default be using a c library containing a <b>malloc</b>
</pre></content><gcs:match lineNumber="4" type="text/html"><pre>/* ---------- To make a <b>malloc</b>.h, start cutting here ------------ */
</pre></gcs:match><gcs:match lineNumber="22" type="text/html"><pre> Note: There may be an updated version of this <b>malloc</b> obtainable at
</pre></gcs:match><gcs:match lineNumber="23" type="text/html"><pre> ftp://gee.cs.oswego.edu/pub/misc/<b>malloc</b>.c
</pre></gcs:match><gcs:match lineNumber="34" type="text/html"><pre>* Why use this <b>malloc</b>?
</pre></gcs:match><gcs:match lineNumber="37" type="text/html"><pre> most tunable <b>malloc</b> ever written. However it is among the fastest
</pre></gcs:match><gcs:match lineNumber="40" type="text/html"><pre> allocator for <b>malloc</b>-intensive programs.
</pre></gcs:match><gcs:match lineNumber="54" type="text/html"><pre> http://gee.cs.oswego.edu/dl/html/<b>malloc</b>.html
</pre></gcs:match><gcs:match lineNumber="56" type="text/html"><pre> You may already by default be using a c library containing a <b>malloc</b>
</pre></gcs:match><gcs:match lineNumber="57" type="text/html"><pre> that is somehow based on some version of this <b>malloc</b> (for example in
</pre></gcs:match><rights>Mozilla</rights></entry>
<entry><id>http://www.google.com/codesearch?hl=en&q=+malloc+show:4n1P2HVOISs:Ybbpph0wR2M:OhIN_sDrG0U&sa=N&ct=rx&cd=5&cs_p=http://regexps.srparish.net/src/hackerlab/hackerlab-1.0pre2.tar.gz&cs_f=hackerlab-1.0pre2/src/hackerlab/tests/mem-tests/unit-must-malloc.sh&cs_p=http://regexps.srparish.net/src/hackerlab/hackerlab-1.0pre2.tar.gz&cs_f=hackerlab-1.0pre2/src/hackerlab/tests/mem-tests/unit-must-malloc.sh#first</id><updated>2007-12-19T16:08:04Z</updated><author><name>Code owned by external author.</name></author><title type="text">hackerlab-1.0pre2/src/hackerlab/tests/mem-tests/unit-must-malloc.sh</title><link rel="alternate" type="text/html" href="http://www.google.com/codesearch?hl=en&q=+malloc+show:4n1P2HVOISs:Ybbpph0wR2M:OhIN_sDrG0U&sa=N&ct=rx&cd=5&cs_p=http://regexps.srparish.net/src/hackerlab/hackerlab-1.0pre2.tar.gz&cs_f=hackerlab-1.0pre2/src/hackerlab/tests/mem-tests/unit-must-malloc.sh&cs_p=http://regexps.srparish.net/src/hackerlab/hackerlab-1.0pre2.tar.gz&cs_f=hackerlab-1.0pre2/src/hackerlab/tests/mem-tests/unit-must-malloc.sh#first"/><gcs:package name="http://regexps.srparish.net/src/hackerlab/hackerlab-1.0pre2.tar.gz" uri="http://regexps.srparish.net/src/hackerlab/hackerlab-1.0pre2.tar.gz"></gcs:package><gcs:file name="hackerlab-1.0pre2/src/hackerlab/tests/mem-tests/unit-must-malloc.sh"></gcs:file><content type="text/html"><pre> 11: echo ================ unit-must-<b>malloc</b> tests ================
./unit-must-<b>malloc</b>
echo ...passed
</pre></content><gcs:match lineNumber="2" type="text/html"><pre># tag: Tom Lord Tue Dec 4 14:54:29 2001 (mem-tests/unit-must-<b>malloc</b>.sh)
</pre></gcs:match><gcs:match lineNumber="11" type="text/html"><pre>echo ================ unit-must-<b>malloc</b> tests ================
</pre></gcs:match><gcs:match lineNumber="12" type="text/html"><pre>./unit-must-<b>malloc</b>
</pre></gcs:match><rights>GPL</rights></entry>
<entry><id>http://www.google.com/codesearch?hl=en&q=+malloc+show:GzkwiWG266M:ykuz3bG00ws:2sTvVSif08g&sa=N&ct=rx&cd=6&cs_p=http://ftp.gnu.org/gnu/tar/tar-1.14.tar.bz2&cs_f=tar-1.14/lib/malloc.c&cs_p=http://ftp.gnu.org/gnu/tar/tar-1.14.tar.bz2&cs_f=tar-1.14/lib/malloc.c#first</id><updated>2007-12-19T16:08:04Z</updated><author><name>Code owned by external author.</name></author><title type="text">tar-1.14/lib/malloc.c</title><link rel="alternate" type="text/html" href="http://www.google.com/codesearch?hl=en&q=+malloc+show:GzkwiWG266M:ykuz3bG00ws:2sTvVSif08g&sa=N&ct=rx&cd=6&cs_p=http://ftp.gnu.org/gnu/tar/tar-1.14.tar.bz2&cs_f=tar-1.14/lib/malloc.c&cs_p=http://ftp.gnu.org/gnu/tar/tar-1.14.tar.bz2&cs_f=tar-1.14/lib/malloc.c#first"/><gcs:package name="http://ftp.gnu.org/gnu/tar/tar-1.14.tar.bz2" uri="http://ftp.gnu.org/gnu/tar/tar-1.14.tar.bz2"></gcs:package><gcs:file name="tar-1.14/lib/malloc.c"></gcs:file><content type="text/html"><pre> 22: #endif
#undef <b>malloc</b>
</pre></content><gcs:match lineNumber="1" type="text/html"><pre>/* Work around bug on some systems where <b>malloc</b> (0) fails.
</pre></gcs:match><gcs:match lineNumber="23" type="text/html"><pre>#undef <b>malloc</b>
</pre></gcs:match><gcs:match lineNumber="31" type="text/html"><pre>rpl_<b>malloc</b> (size_t n)
</pre></gcs:match><gcs:match lineNumber="35" type="text/html"><pre> return <b>malloc</b> (n);
</pre></gcs:match><rights>GPL</rights></entry>
<entry><id>http://www.google.com/codesearch?hl=en&q=+malloc+show:o_TFIeBY6dY:ktI_dt8wPao:AI03BD1Dz0Y&sa=N&ct=rx&cd=7&cs_p=http://ftp.gnu.org/gnu/tar/tar-1.16.1.tar.gz&cs_f=tar-1.16.1/lib/malloc.c&cs_p=http://ftp.gnu.org/gnu/tar/tar-1.16.1.tar.gz&cs_f=tar-1.16.1/lib/malloc.c#first</id><updated>2007-12-19T16:08:04Z</updated><author><name>Code owned by external author.</name></author><title type="text">tar-1.16.1/lib/malloc.c</title><link rel="alternate" type="text/html" href="http://www.google.com/codesearch?hl=en&q=+malloc+show:o_TFIeBY6dY:ktI_dt8wPao:AI03BD1Dz0Y&sa=N&ct=rx&cd=7&cs_p=http://ftp.gnu.org/gnu/tar/tar-1.16.1.tar.gz&cs_f=tar-1.16.1/lib/malloc.c&cs_p=http://ftp.gnu.org/gnu/tar/tar-1.16.1.tar.gz&cs_f=tar-1.16.1/lib/malloc.c#first"/><gcs:package name="http://ftp.gnu.org/gnu/tar/tar-1.16.1.tar.gz" uri="http://ftp.gnu.org/gnu/tar/tar-1.16.1.tar.gz"></gcs:package><gcs:file name="tar-1.16.1/lib/malloc.c"></gcs:file><content type="text/html"><pre> 21: #include &lt;config.h&gt;
#undef <b>malloc</b>
</pre></content><gcs:match lineNumber="1" type="text/html"><pre>/* <b>malloc</b>() function that is glibc compatible.
</pre></gcs:match><gcs:match lineNumber="22" type="text/html"><pre>#undef <b>malloc</b>
</pre></gcs:match><gcs:match lineNumber="30" type="text/html"><pre>rpl_<b>malloc</b> (size_t n)
</pre></gcs:match><gcs:match lineNumber="34" type="text/html"><pre> return <b>malloc</b> (n);
</pre></gcs:match><rights>GPL</rights></entry>
<entry><id>http://www.google.com/codesearch?hl=en&q=+malloc+show:_ibw-VLkMoI:jBOtIJSmFd4:-0NUEVeCwfY&sa=N&ct=rx&cd=8&cs_p=http://freshmeat.net/redir/uclibc/20616/url_bz2/uClibc-0.9.28.1.tar.bz2&cs_f=uClibc-0.9.29/include/malloc.h&cs_p=http://freshmeat.net/redir/uclibc/20616/url_bz2/uClibc-0.9.28.1.tar.bz2&cs_f=uClibc-0.9.29/include/malloc.h#first</id><updated>2007-12-19T16:08:04Z</updated><author><name>Code owned by external author.</name></author><title type="text">uClibc-0.9.29/include/malloc.h</title><link rel="alternate" type="text/html" href="http://www.google.com/codesearch?hl=en&q=+malloc+show:_ibw-VLkMoI:jBOtIJSmFd4:-0NUEVeCwfY&sa=N&ct=rx&cd=8&cs_p=http://freshmeat.net/redir/uclibc/20616/url_bz2/uClibc-0.9.28.1.tar.bz2&cs_f=uClibc-0.9.29/include/malloc.h&cs_p=http://freshmeat.net/redir/uclibc/20616/url_bz2/uClibc-0.9.28.1.tar.bz2&cs_f=uClibc-0.9.29/include/malloc.h#first"/><gcs:package name="http://freshmeat.net/redir/uclibc/20616/url_bz2/uClibc-0.9.28.1.tar.bz2" uri="http://freshmeat.net/redir/uclibc/20616/url_bz2/uClibc-0.9.28.1.tar.bz2"></gcs:package><gcs:file name="uClibc-0.9.29/include/malloc.h"></gcs:file><content type="text/html"><pre> 1: /* Prototypes and definition for <b>malloc</b> implementation.
Copyright (C) 1996, 1997, 1999, 2000 Free Software Foundation, Inc.
</pre></content><gcs:match lineNumber="1" type="text/html"><pre>/* Prototypes and definition for <b>malloc</b> implementation.
</pre></gcs:match><gcs:match lineNumber="26" type="text/html"><pre> `pt<b>malloc</b>&#39;, a <b>malloc</b> implementation for multiple threads without
</pre></gcs:match><gcs:match lineNumber="28" type="text/html"><pre> See the files `pt<b>malloc</b>.c&#39; or `COPYRIGHT&#39; for copying conditions.
</pre></gcs:match><gcs:match lineNumber="32" type="text/html"><pre> This work is mainly derived from <b>malloc</b>-2.6.4 by Doug Lea
</pre></gcs:match><gcs:match lineNumber="35" type="text/html"><pre> ftp://g.oswego.edu/pub/misc/<b>malloc</b>.c
</pre></gcs:match><gcs:match lineNumber="40" type="text/html"><pre> `pt<b>malloc</b>.c&#39;.
</pre></gcs:match><gcs:match lineNumber="45" type="text/html"><pre># define __<b>malloc</b>_ptr_t void *
</pre></gcs:match><gcs:match lineNumber="51" type="text/html"><pre># define __<b>malloc</b>_ptr_t char *
</pre></gcs:match><gcs:match lineNumber="56" type="text/html"><pre># define __<b>malloc</b>_size_t size_t
</pre></gcs:match><rights>LGPL</rights></entry>
<entry><id>http://www.google.com/codesearch?hl=en&q=+malloc+show:F6qHcZ9vefo:bTX7o9gKfks:hECF4r_eKC0&sa=N&ct=rx&cd=9&cs_p=http://ftp.gnu.org/gnu/glibc/glibc-2.0.1.tar.gz&cs_f=glibc-2.0.1/hurd/hurdmalloc.h&cs_p=http://ftp.gnu.org/gnu/glibc/glibc-2.0.1.tar.gz&cs_f=glibc-2.0.1/hurd/hurdmalloc.h#first</id><updated>2007-12-19T16:08:04Z</updated><author><name>Code owned by external author.</name></author><title type="text">glibc-2.0.1/hurd/hurdmalloc.h</title><link rel="alternate" type="text/html" href="http://www.google.com/codesearch?hl=en&q=+malloc+show:F6qHcZ9vefo:bTX7o9gKfks:hECF4r_eKC0&sa=N&ct=rx&cd=9&cs_p=http://ftp.gnu.org/gnu/glibc/glibc-2.0.1.tar.gz&cs_f=glibc-2.0.1/hurd/hurdmalloc.h&cs_p=http://ftp.gnu.org/gnu/glibc/glibc-2.0.1.tar.gz&cs_f=glibc-2.0.1/hurd/hurdmalloc.h#first"/><gcs:package name="http://ftp.gnu.org/gnu/glibc/glibc-2.0.1.tar.gz" uri="http://ftp.gnu.org/gnu/glibc/glibc-2.0.1.tar.gz"></gcs:package><gcs:file name="glibc-2.0.1/hurd/hurdmalloc.h"></gcs:file><content type="text/html"><pre> 15: #define <b>malloc</b> _hurd_<b>malloc</b>
#define realloc _hurd_realloc
</pre></content><gcs:match lineNumber="3" type="text/html"><pre> All hurd-internal code which uses <b>malloc</b> et al includes this file so it
</pre></gcs:match><gcs:match lineNumber="4" type="text/html"><pre> will use the internal <b>malloc</b> routines _hurd_{<b>malloc</b>,realloc,free}
</pre></gcs:match><gcs:match lineNumber="7" type="text/html"><pre> of <b>malloc</b> et al is the unixoid one using sbrk.
</pre></gcs:match><gcs:match lineNumber="11" type="text/html"><pre>extern void *_hurd_<b>malloc</b> (size_t);
</pre></gcs:match><gcs:match lineNumber="15" type="text/html"><pre>#define <b>malloc</b> _hurd_<b>malloc</b>
</pre></gcs:match><rights>GPL</rights></entry>
<entry><id>http://www.google.com/codesearch?hl=en&q=+malloc+show:CHUvHYzyLc8:pdcAfzDA6lY:wjofHuNLTHg&sa=N&ct=rx&cd=10&cs_p=ftp://apache.mirrors.pair.com/httpd/httpd-2.2.4.tar.bz2&cs_f=httpd-2.2.4/srclib/apr/include/arch/netware/apr_private.h&cs_p=ftp://apache.mirrors.pair.com/httpd/httpd-2.2.4.tar.bz2&cs_f=httpd-2.2.4/srclib/apr/include/arch/netware/apr_private.h#first</id><updated>2007-12-19T16:08:04Z</updated><author><name>Code owned by external author.</name></author><title type="text">httpd-2.2.4/srclib/apr/include/arch/netware/apr_private.h</title><link rel="alternate" type="text/html" href="http://www.google.com/codesearch?hl=en&q=+malloc+show:CHUvHYzyLc8:pdcAfzDA6lY:wjofHuNLTHg&sa=N&ct=rx&cd=10&cs_p=ftp://apache.mirrors.pair.com/httpd/httpd-2.2.4.tar.bz2&cs_f=httpd-2.2.4/srclib/apr/include/arch/netware/apr_private.h&cs_p=ftp://apache.mirrors.pair.com/httpd/httpd-2.2.4.tar.bz2&cs_f=httpd-2.2.4/srclib/apr/include/arch/netware/apr_private.h#first"/><gcs:package name="ftp://apache.mirrors.pair.com/httpd/httpd-2.2.4.tar.bz2" uri="ftp://apache.mirrors.pair.com/httpd/httpd-2.2.4.tar.bz2"></gcs:package><gcs:file name="httpd-2.2.4/srclib/apr/include/arch/netware/apr_private.h"></gcs:file><content type="text/html"><pre> 173: #undef <b>malloc</b>
#define <b>malloc</b>(x) library_<b>malloc</b>(gLibHandle,x)
</pre></content><gcs:match lineNumber="170" type="text/html"><pre>/* Redefine <b>malloc</b> to use the library <b>malloc</b> call so
</pre></gcs:match><gcs:match lineNumber="173" type="text/html"><pre>#undef <b>malloc</b>
</pre></gcs:match><gcs:match lineNumber="174" type="text/html"><pre>#define <b>malloc</b>(x) library_<b>malloc</b>(gLibHandle,x)
</pre></gcs:match><rights>Apache</rights></entry>
</feed>"""
YOU_TUBE_VIDEO_FEED = """<?xml version='1.0' encoding='UTF-8'?>
<feed xmlns='http://www.w3.org/2005/Atom'
xmlns:openSearch='http://a9.com/-/spec/opensearchrss/1.0/'
xmlns:gml='http://www.opengis.net/gml'
xmlns:georss='http://www.georss.org/georss'
xmlns:media='http://search.yahoo.com/mrss/'
xmlns:yt='http://gdata.youtube.com/schemas/2007'
xmlns:gd='http://schemas.google.com/g/2005'>
<id>http://gdata.youtube.com/feeds/api/standardfeeds/top_rated</id>
<updated>2008-02-21T18:57:10.801Z</updated>
<category scheme='http://schemas.google.com/g/2005#kind'
term='http://gdata.youtube.com/schemas/2007#video'/>
<title type='text'>Top Rated</title>
<logo>http://www.youtube.com/img/pic_youtubelogo_123x63.gif</logo>
<link rel='alternate' type='text/html'
href='http://www.youtube.com/browser?s=tr'/>
<link rel='http://schemas.google.com/g/2005#feed' type='application/atom+xml'
href='http://gdata.youtube.com/feeds/api/standardfeeds/top_rated'/>
<link rel='self' type='application/atom+xml'
href='http://gdata.youtube.com/feeds/api/standardfeeds/top_rated?start_index=1&max-results=25'/>
<link rel='self' type='application/atom+xml'
href='http://gdata.youtube.com/feeds/api/standardfeeds/top_rated?start_index=26&max-results=25'/>
<author>
<name>YouTube</name>
<uri>http://www.youtube.com/</uri>
</author>
<generator version='beta'
uri='http://gdata.youtube.com/'>YouTube data API</generator>
<openSearch:totalResults>99</openSearch:totalResults>
<openSearch:startIndex>1</openSearch:startIndex>
<openSearch:itemsPerPage>25</openSearch:itemsPerPage>
<entry>
<id>http://gdata.youtube.com/feeds/api/videos/ZTUVgYoeN_b</id>
<published>2007-02-16T20:22:57.000Z</published>
<updated>2007-02-16T20:22:57.000Z</updated>
<category scheme="http://schemas.google.com/g/2005#kind"
term="http://gdata.youtube.com/schemas/2007#video"/>
<category scheme="http://gdata.youtube.com/schemas/2007/keywords.cat"
term="Steventon"/>
<category scheme="http://gdata.youtube.com/schemas/2007/keywords.cat"
term="walk"/>
<category scheme="http://gdata.youtube.com/schemas/2007/keywords.cat"
term="Darcy"/>
<category scheme="http://gdata.youtube.com/schemas/2007/categories.cat"
term="Entertainment" label="Entertainment"/>
<title type="text">My walk with Mr. Darcy</title>
<content type="html"><div ... html content trimmed ...></content>
<link rel="self" type="application/atom+xml"
href="http://gdata.youtube.com/feeds/api/videos/ZTUVgYoeN_b"/>
<link rel="alternate" type="text/html"
href="http://www.youtube.com/watch?v=ZTUVgYoeN_b"/>
<link rel="http://gdata.youtube.com/schemas/2007#video.responses"
type="application/atom+xml"
href="http://gdata.youtube.com/feeds/api/videos/ZTUVgYoeN_b/responses"/>
<link rel="http://gdata.youtube.com/schemas/2007#video.ratings"
type="application/atom+xml"
href="http://gdata.youtube.com/feeds/api/videos/ZTUVgYoeN_b/ratings"/>
<link rel="http://gdata.youtube.com/schemas/2007#video.complaints"
type="application/atom+xml"
href="http://gdata.youtube.com/feeds/api/videos/ZTUVgYoeN_b/complaints"/>
<link rel="http://gdata.youtube.com/schemas/2007#video.related"
type="application/atom+xml"
href="http://gdata.youtube.com/feeds/api/videos/ZTUVgYoeN_b/related"/>
<author>
<name>Andy Samplo</name>
<uri>http://gdata.youtube.com/feeds/api/users/andyland74</uri>
</author>
<media:group>
<media:title type="plain">Shopping for Coats</media:title>
<media:description type="plain">
What could make for more exciting video?
</media:description>
<media:keywords>Shopping, parkas</media:keywords>
<yt:duration seconds="79"/>
<media:category label="People"
scheme="http://gdata.youtube.com/schemas/2007/categories.cat">People
</media:category>
<media:content
url='http://www.youtube.com/v/ZTUVgYoeN_b'
type='application/x-shockwave-flash' medium='video'
isDefault='true' expression="full" duration='215' yt:format="5"/>
<media:content
url='rtsp://rtsp2.youtube.com/ChoLENy73bIAEQ1k30OPEgGDA==/0/0/0/video.3gp'
type='video/3gpp' medium='video'
expression="full" duration='215' yt:format="1"/>
<media:content
url='rtsp://rtsp2.youtube.com/ChoLENy73bIAEQ1k30OPEgGDA==/0/0/0/video.3gp'
type='video/3gpp' medium='video'
expression="full" duration='215' yt:format="6"/>
<media:player url="http://www.youtube.com/watch?v=ZTUVgYoeN_b"/>
<media:thumbnail url="http://img.youtube.com/vi/ZTUVgYoeN_b/2.jpg"
height="97" width="130" time="00:00:03.500"/>
<media:thumbnail url="http://img.youtube.com/vi/ZTUVgYoeN_b/1.jpg"
height="97" width="130" time="00:00:01.750"/>
<media:thumbnail url="http://img.youtube.com/vi/ZTUVgYoeN_b/3.jpg"
height="97" width="130" time="00:00:05.250"/>
<media:thumbnail url="http://img.youtube.com/vi/ZTUVgYoeN_b/0.jpg"
height="240" width="320" time="00:00:03.500"/>
</media:group>
<yt:statistics viewCount="93"/>
<gd:rating min='1' max='5' numRaters='435' average='4.94'/>
<gd:comments>
<gd:feedLink
href="http://gdata.youtube.com/feeds/api/videos/ZTUVgYoeN_b/comments"
countHint='2197'/>
</gd:comments>
</entry>
</feed>"""
YOU_TUBE_COMMENT_FEED = """<?xml version='1.0' encoding='UTF-8'?>
<feed xmlns='http://www.w3.org/2005/Atom' xmlns:openSearch='http://a9.com/-/spec/opensearchrss/1.0/'>
<id>http://gdata.youtube.com/feeds/api/videos/ZTUVgYoeN_b/comments?start-index=1&max-results=25</id>
<updated>2008-02-25T23:14:03.148Z</updated>
<category scheme='http://schemas.google.com/g/2005#kind' term='http://gdata.youtube.com/schemas/2007#comment'/>
<title type='text'>Comments on 'My walk with Mr. Darcy'</title>
<logo>http://www.youtube.com/img/pic_youtubelogo_123x63.gif</logo>
<link rel='related' type='application/atom+xml' href='http://gdata.youtube.com/feeds/api/videos/ZTUVgYoeN_b'/>
<link rel='alternate' type='text/html' href='http://www.youtube.com/watch?v=ZTUVgYoeN_b'/>
<link rel='http://schemas.google.com/g/2005#feed' type='application/atom+xml' href='http://gdata.youtube.com/feeds/api/videos/ZTUVgYoeN_b/comments'/>
<link rel='self' type='application/atom+xml' href='http://gdata.youtube.com/feeds/api/videos/ZTUVgYoeN_b/comments?start-index=1&max-results=25'/>
<link rel='next' type='application/atom+xml' href='http://gdata.youtube.com/feeds/api/videos/ZTUVgYoeN_b/comments?start-index=26&max-results=25'/>
<author>
<name>YouTube</name>
<uri>http://www.youtube.com/</uri>
</author>
<generator version='beta' uri='http://gdata.youtube.com/'>YouTube data API</generator>
<openSearch:totalResults>100</openSearch:totalResults>
<openSearch:startIndex>1</openSearch:startIndex>
<openSearch:itemsPerPage>25</openSearch:itemsPerPage>
<entry>
<id>http://gdata.youtube.com/feeds/api/videos/ZTUVgYoeN_b/comments/7F2BAAD03653A691</id>
<published>2007-05-23T00:21:59.000-07:00</published>
<updated>2007-05-23T00:21:59.000-07:00</updated>
<category scheme='http://schemas.google.com/g/2005#kind' term='http://gdata.youtube.com/schemas/2007#comment'/>
<title type='text'>Walking is fun.</title>
<content type='text'>Walking is fun.</content>
<link rel='related' type='application/atom+xml' href='http://gdata.youtube.com/feeds/api/videos/ZTUVgYoeN_b'/>
<link rel='alternate' type='text/html' href='http://www.youtube.com/watch?v=ZTUVgYoeN_b'/>
<link rel='self' type='application/atom+xml' href='http://gdata.youtube.com/feeds/api/videos/ZTUVgYoeN_b/comments/7F2BAAD03653A691'/>
<author>
<name>andyland744</name>
<uri>http://gdata.youtube.com/feeds/api/users/andyland744</uri>
</author>
</entry>
</feed>"""
YOU_TUBE_PLAYLIST_FEED = """<?xml version='1.0' encoding='UTF-8'?>
<feed xmlns='http://www.w3.org/2005/Atom'
xmlns:openSearch='http://a9.com/-/spec/opensearchrss/1.0/'
xmlns:media='http://search.yahoo.com/mrss/'
xmlns:yt='http://gdata.youtube.com/schemas/2007'
xmlns:gd='http://schemas.google.com/g/2005'>
<id>http://gdata.youtube.com/feeds/users/andyland74/playlists?start-index=1&max-results=25</id>
<updated>2008-02-26T00:26:15.635Z</updated>
<category scheme='http://schemas.google.com/g/2005#kind' term='http://gdata.youtube.com/schemas/2007#playlistLink'/>
<title type='text'>andyland74's Playlists</title>
<logo>http://www.youtube.com/img/pic_youtubelogo_123x63.gif</logo>
<link rel='related' type='application/atom+xml' href='http://gdata.youtube.com/feeds/users/andyland74'/>
<link rel='alternate' type='text/html' href='http://www.youtube.com/profile_play_list?user=andyland74'/>
<link rel='http://schemas.google.com/g/2005#feed' type='application/atom+xml' href='http://gdata.youtube.com/feeds/users/andyland74/playlists'/>
<link rel='self' type='application/atom+xml' href='http://gdata.youtube.com/feeds/users/andyland74/playlists?start-index=1&max-results=25'/>
<author>
<name>andyland74</name>
<uri>http://gdata.youtube.com/feeds/users/andyland74</uri>
</author>
<generator version='beta' uri='http://gdata.youtube.com/'>YouTube data API</generator>
<openSearch:totalResults>1</openSearch:totalResults>
<openSearch:startIndex>1</openSearch:startIndex>
<openSearch:itemsPerPage>25</openSearch:itemsPerPage>
<entry>
<id>http://gdata.youtube.com/feeds/users/andyland74/playlists/8BCDD04DE8F771B2</id>
<published>2007-11-04T17:30:27.000-08:00</published>
<updated>2008-02-22T09:55:14.000-08:00</updated>
<category scheme='http://schemas.google.com/g/2005#kind' term='http://gdata.youtube.com/schemas/2007#playlistLink'/>
<title type='text'>My New Playlist Title</title>
<content type='text'>My new playlist Description</content>
<link rel='related' type='application/atom+xml' href='http://gdata.youtube.com/feeds/users/andyland74'/>
<link rel='alternate' type='text/html' href='http://www.youtube.com/view_play_list?p=8BCDD04DE8F771B2'/>
<link rel='self' type='application/atom+xml' href='http://gdata.youtube.com/feeds/users/andyland74/playlists/8BCDD04DE8F771B2'/>
<author>
<name>andyland74</name>
<uri>http://gdata.youtube.com/feeds/users/andyland74</uri>
</author>
<yt:description>My new playlist Description</yt:description>
<gd:feedLink rel='http://gdata.youtube.com/schemas/2007#playlist' href='http://gdata.youtube.com/feeds/playlists/8BCDD04DE8F771B2'/>
</entry>
</feed>"""
YOU_TUBE_SUBSCRIPTIONS_FEED = """<?xml version='1.0' encoding='UTF-8'?>
<feed xmlns='http://www.w3.org/2005/Atom'
xmlns:openSearch='http://a9.com/-/spec/opensearchrss/1.0/'
xmlns:media='http://search.yahoo.com/mrss/'
xmlns:yt='http://gdata.youtube.com/schemas/2007'
xmlns:gd='http://schemas.google.com/g/2005'>
<id>http://gdata.youtube.com/feeds/users/andyland74/subscriptions?start-index=1&max-results=25</id>
<updated>2008-02-26T00:26:15.635Z</updated>
<category scheme='http://schemas.google.com/g/2005#kind'
term='http://gdata.youtube.com/schemas/2007#subscription'/>
<title type='text'>andyland74's Subscriptions</title>
<logo>http://www.youtube.com/img/pic_youtubelogo_123x63.gif</logo>
<link rel='related' type='application/atom+xml'
href='http://gdata.youtube.com/feeds/users/andyland74'/>
<link rel='alternate' type='text/html'
href='http://www.youtube.com/profile_subscriptions?user=andyland74'/>
<link rel='http://schemas.google.com/g/2005#feed' type='application/atom+xml'
href='http://gdata.youtube.com/feeds/users/andyland74/subscriptions'/>
<link rel='self' type='application/atom+xml'
href='http://gdata.youtube.com/feeds/users/andyland74/subscriptions?start-index=1&max-results=25'/>
<author>
<name>andyland74</name>
<uri>http://gdata.youtube.com/feeds/users/andyland74</uri>
</author>
<generator version='beta' uri='http://gdata.youtube.com/'>YouTube data API</generator>
<openSearch:totalResults>1</openSearch:totalResults>
<openSearch:startIndex>1</openSearch:startIndex>
<openSearch:itemsPerPage>25</openSearch:itemsPerPage>
<entry>
<id>http://gdata.youtube.com/feeds/users/andyland74/subscriptions/d411759045e2ad8c</id>
<published>2007-11-04T17:30:27.000-08:00</published>
<updated>2008-02-22T09:55:14.000-08:00</updated>
<category scheme='http://gdata.youtube.com/schemas/2007/subscriptiontypes.cat'
term='channel'/>
<category scheme='http://schemas.google.com/g/2005#kind'
term='http://gdata.youtube.com/schemas/2007#subscription'/>
<title type='text'>Videos published by : NBC</title>
<link rel='related' type='application/atom+xml'
href='http://gdata.youtube.com/feeds/users/andyland74'/>
<link rel='alternate' type='text/html'
href='http://www.youtube.com/profile_videos?user=NBC'/>
<link rel='self' type='application/atom+xml'
href='http://gdata.youtube.com/feeds/users/andyland74/subscriptions/d411759045e2ad8c'/>
<author>
<name>andyland74</name>
<uri>http://gdata.youtube.com/feeds/users/andyland74</uri>
</author>
<yt:username>NBC</yt:username>
<gd:feedLink rel='http://gdata.youtube.com/schemas/2007#user.uploads'
href='http://gdata.youtube.com/feeds/api/users/nbc/uploads'/>
</entry>
</feed>"""
YOU_TUBE_PROFILE = """<?xml version='1.0' encoding='UTF-8'?>
<entry xmlns='http://www.w3.org/2005/Atom'
xmlns:media='http://search.yahoo.com/mrss/'
xmlns:yt='http://gdata.youtube.com/schemas/2007'
xmlns:gd='http://schemas.google.com/g/2005'>
<id>http://gdata.youtube.com/feeds/users/andyland74</id>
<published>2006-10-16T00:09:45.000-07:00</published>
<updated>2008-02-26T11:48:21.000-08:00</updated>
<category scheme='http://gdata.youtube.com/schemas/2007/channeltypes.cat'
term='Standard'/>
<category scheme='http://schemas.google.com/g/2005#kind'
term='http://gdata.youtube.com/schemas/2007#userProfile'/>
<title type='text'>andyland74 Channel</title>
<link rel='alternate' type='text/html'
href='http://www.youtube.com/profile?user=andyland74'/>
<link rel='self' type='application/atom+xml'
href='http://gdata.youtube.com/feeds/users/andyland74'/>
<author>
<name>andyland74</name>
<uri>http://gdata.youtube.com/feeds/users/andyland74</uri>
</author>
<yt:age>33</yt:age>
<yt:username>andyland74</yt:username>
<yt:books>Catch-22</yt:books>
<yt:gender>m</yt:gender>
<yt:company>Google</yt:company>
<yt:hobbies>Testing YouTube APIs</yt:hobbies>
<yt:location>US</yt:location>
<yt:movies>Aqua Teen Hungerforce</yt:movies>
<yt:music>Elliott Smith</yt:music>
<yt:occupation>Technical Writer</yt:occupation>
<yt:school>University of North Carolina</yt:school>
<media:thumbnail url='http://i.ytimg.com/vi/YFbSxcdOL-w/default.jpg'/>
<yt:statistics viewCount='9' videoWatchCount='21' subscriberCount='1'
lastWebAccess='2008-02-25T16:03:38.000-08:00'/>
<gd:feedLink rel='http://gdata.youtube.com/schemas/2007#user.favorites'
href='http://gdata.youtube.com/feeds/users/andyland74/favorites' countHint='4'/>
<gd:feedLink rel='http://gdata.youtube.com/schemas/2007#user.contacts'
href='http://gdata.youtube.com/feeds/users/andyland74/contacts' countHint='1'/>
<gd:feedLink rel='http://gdata.youtube.com/schemas/2007#user.inbox'
href='http://gdata.youtube.com/feeds/users/andyland74/inbox' countHint='0'/>
<gd:feedLink rel='http://gdata.youtube.com/schemas/2007#user.playlists'
href='http://gdata.youtube.com/feeds/users/andyland74/playlists'/>
<gd:feedLink rel='http://gdata.youtube.com/schemas/2007#user.subscriptions'
href='http://gdata.youtube.com/feeds/users/andyland74/subscriptions' countHint='4'/>
<gd:feedLink rel='http://gdata.youtube.com/schemas/2007#user.uploads'
href='http://gdata.youtube.com/feeds/users/andyland74/uploads' countHint='1'/>
</entry>"""
NEW_CONTACT = """<?xml version='1.0' encoding='UTF-8'?>
<atom:entry xmlns:atom='http://www.w3.org/2005/Atom'
xmlns:gd='http://schemas.google.com/g/2005'>
<atom:category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/contact/2008#contact' />
<atom:title type='text'>Elizabeth Bennet</atom:title>
<atom:content type='text'>Notes</atom:content>
<gd:email rel='http://schemas.google.com/g/2005#work'
address='liz@gmail.com' />
<gd:email rel='http://schemas.google.com/g/2005#home'
address='liz@example.org' />
<gd:phoneNumber rel='http://schemas.google.com/g/2005#work'
primary='true'>(206)555-1212</gd:phoneNumber>
<gd:phoneNumber rel='http://schemas.google.com/g/2005#home'>(206)555-1213</gd:phoneNumber>
<gd:im address='liz@gmail.com'
protocol='http://schemas.google.com/g/2005#GOOGLE_TALK'
rel='http://schemas.google.com/g/2005#home' />
<gd:postalAddress rel='http://schemas.google.com/g/2005#work'
primary='true'>1600 Amphitheatre Pkwy Mountain View</gd:postalAddress>
</atom:entry>"""
CONTACTS_FEED = """<?xml version='1.0' encoding='UTF-8'?>
<feed xmlns='http://www.w3.org/2005/Atom'
xmlns:openSearch='http://a9.com/-/spec/opensearchrss/1.0/'
xmlns:gd='http://schemas.google.com/g/2005'>
<id>http://www.google.com/m8/feeds/contacts/liz%40gmail.com/base</id>
<updated>2008-03-05T12:36:38.836Z</updated>
<category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/contact/2008#contact' />
<title type='text'>Contacts</title>
<link rel='http://schemas.google.com/g/2005#feed'
type='application/atom+xml'
href='http://www.google.com/m8/feeds/contacts/liz%40gmail.com/base' />
<link rel='http://schemas.google.com/g/2005#post'
type='application/atom+xml'
href='http://www.google.com/m8/feeds/contacts/liz%40gmail.com/base' />
<link rel='self' type='application/atom+xml'
href='http://www.google.com/m8/feeds/contacts/liz%40gmail.com/base?max-results=25' />
<author>
<name>Elizabeth Bennet</name>
<email>liz@gmail.com</email>
</author>
<generator version='1.0' uri='http://www.google.com/m8/feeds/contacts'>Contacts</generator>
<openSearch:totalResults>1</openSearch:totalResults>
<openSearch:startIndex>1</openSearch:startIndex>
<openSearch:itemsPerPage>25</openSearch:itemsPerPage>
<entry>
<id>http://www.google.com/m8/feeds/contacts/liz%40gmail.com/base/c9012de</id>
<updated>2008-03-05T12:36:38.835Z</updated>
<category scheme='http://schemas.google.com/g/2005#kind'
term='http://schemas.google.com/contact/2008#contact' />
<title type='text'>Fitzgerald</title>
<link rel='self' type='application/atom+xml'
href='http://www.google.com/m8/feeds/contacts/liz%40gmail.com/base/c9012de' />
<link rel='edit' type='application/atom+xml'
href='http://www.google.com/m8/feeds/contacts/liz%40gmail.com/base/c9012de/1204720598835000' />
<gd:phoneNumber rel='http://schemas.google.com/g/2005#home'
primary='true'>456</gd:phoneNumber>
</entry>
</feed>"""
| 58.45241
| 1,518
| 0.688413
| 21,050
| 141,864
| 4.61962
| 0.066698
| 0.05877
| 0.047901
| 0.056354
| 0.86579
| 0.84837
| 0.830065
| 0.806094
| 0.782401
| 0.756671
| 0
| 0.065864
| 0.112023
| 141,864
| 2,426
| 1,519
| 58.476505
| 0.706078
| 0.004018
| 0
| 0.615917
| 0
| 0.240484
| 0.991754
| 0.24211
| 0
| 0
| 0.000057
| 0
| 0
| 1
| 0
| false
| 0.000865
| 0.000433
| 0
| 0.000433
| 0.000433
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
d09ef96ba6c64ac2e0627c2fe027aec13b78a40a
| 239
|
py
|
Python
|
nmigen/vendor/quicklogic.py
|
psumesh/nmigen
|
7d611b8fc1d9e58853ff268ec38ff8f4131a9774
|
[
"BSD-2-Clause"
] | 528
|
2020-01-28T18:21:00.000Z
|
2021-12-09T06:27:51.000Z
|
nmigen/vendor/quicklogic.py
|
psumesh/nmigen
|
7d611b8fc1d9e58853ff268ec38ff8f4131a9774
|
[
"BSD-2-Clause"
] | 360
|
2020-01-28T18:34:30.000Z
|
2021-12-10T08:03:32.000Z
|
nmigen/vendor/quicklogic.py
|
psumesh/nmigen
|
7d611b8fc1d9e58853ff268ec38ff8f4131a9774
|
[
"BSD-2-Clause"
] | 100
|
2020-02-06T21:55:46.000Z
|
2021-11-25T19:20:44.000Z
|
from amaranth.vendor.quicklogic import *
from amaranth.vendor.quicklogic import __all__
import warnings
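# Compatibility shim (descriptive note): nmigen was renamed to Amaranth HDL, so this module
# re-exports the new package and warns users to import amaranth.vendor.quicklogic instead.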
warnings.warn("instead of nmigen.vendor.quicklogic, use amaranth.vendor.quicklogic",
DeprecationWarning, stacklevel=2)
| 29.875
| 84
| 0.786611
| 27
| 239
| 6.814815
| 0.555556
| 0.347826
| 0.391304
| 0.304348
| 0.369565
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004854
| 0.138075
| 239
| 7
| 85
| 34.142857
| 0.88835
| 0
| 0
| 0
| 0
| 0
| 0.280335
| 0.213389
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
d0c14fe497d9110b9007fb7eb9747cee777d6768
| 40
|
py
|
Python
|
service/history/__init__.py
|
volbil/ethereum-indexer
|
5efef3afef07a7afed558983385ebb666f27d5d9
|
[
"MIT"
] | null | null | null |
service/history/__init__.py
|
volbil/ethereum-indexer
|
5efef3afef07a7afed558983385ebb666f27d5d9
|
[
"MIT"
] | null | null | null |
service/history/__init__.py
|
volbil/ethereum-indexer
|
5efef3afef07a7afed558983385ebb666f27d5d9
|
[
"MIT"
] | null | null | null |
from .views import blueprint as history
| 20
| 39
| 0.825
| 6
| 40
| 5.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 40
| 1
| 40
| 40
| 0.970588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
323ae5b96bc5632cb5beb3be17561a9191465d81
| 110,590
|
py
|
Python
|
WS2012R2/lisa/tools/middleware_bench/connector.py
|
bogdancarpusor/lis-test
|
c8ea3fe90918ed666bd11f8316b97d6d430c6c01
|
[
"Apache-2.0"
] | null | null | null |
WS2012R2/lisa/tools/middleware_bench/connector.py
|
bogdancarpusor/lis-test
|
c8ea3fe90918ed666bd11f8316b97d6d430c6c01
|
[
"Apache-2.0"
] | null | null | null |
WS2012R2/lisa/tools/middleware_bench/connector.py
|
bogdancarpusor/lis-test
|
c8ea3fe90918ed666bd11f8316b97d6d430c6c01
|
[
"Apache-2.0"
] | null | null | null |
"""
Linux on Hyper-V and Azure Test Code, ver. 1.0.0
Copyright (c) Microsoft Corporation
All rights reserved
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
See the Apache Version 2.0 License for specific language governing
permissions and limitations under the License.
"""
import logging
import os
import time
from utils import constants
from utils import shortcut
from utils.cmdshell import SSHClient
from utils.cmdshell import WinRMClient
from report.db_utils import upload_results
from report.results_parser import OrionLogsReader, SysbenchLogsReader, MemcachedLogsReader,\
RedisLogsReader, ApacheLogsReader, MariadbLogsReader, MongodbLogsReader, ZookeeperLogsReader,\
TerasortLogsReader, TCPLogsReader, LatencyLogsReader, StorageLogsReader, SingleTCPLogsReader,\
UDPLogsReader, SQLServerLogsReader, PostgreSQLLogsReader, SchedulerLogsReader
from providers.amazon_service import AWSConnector
from providers.azure_service import AzureConnector
from providers.gcp_service import GCPConnector
logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',
datefmt='%y/%m/%d %H:%M:%S', level=logging.INFO)
log = logging.getLogger(__name__)
def setup_env(provider=None, vm_count=None, test_type=None, disk_size=None, raid=None, keyid=None,
secret=None, token=None, subscriptionid=None, tenantid=None, projectid=None,
imageid=None, instancetype=None, user=None, localpath=None, region=None, zone=None,
sriov=False, kernel=None):
"""
Setup test environment, creating VMs and disk devices.
:param provider Service provider to be used e.g. azure, aws, gce.
:param vm_count: Number of VMs to prepare
:param test_type: vm_disk > 1 VM with disk (Orion and Sysbench)
no_disk > No disk attached (Redis, Memcached, Apache_bench)
db_disk > Second VM with disk (MariaDB, MongoDB)
cluster_disk > All VMs have disks (Terasort)
:param disk_size:
:param raid: Bool or Int (the number of disks), to specify if a RAID will be configured
:param keyid: user key for executing remote connection
:param secret: user secret for executing remote connection
:param token: GCE refresh token obtained with gcloud sdk
:param subscriptionid: Azure specific subscription id
:param tenantid: Azure specific tenant id
:param projectid: GCE specific project id
:param imageid: AWS OS AMI image id or
Azure image references offer and sku: e.g. 'UbuntuServer#16.04.0-LTS' or
GCE image family, e.g. 'ubuntu-1604-lts'
:param instancetype: AWS instance resource type e.g 'd2.4xlarge' or
Azure hardware profile vm size e.g. 'Standard_DS14_v2' or
GCE instance size e.g. 'n1-highmem-16'
:param user: remote ssh user for the instance
:param localpath: localpath where the logs should be downloaded, and the
default path for other necessary tools
:param region: region to connect to
:param zone: zone where other resources should be available
:param sriov: bool for configuring SR-IOV or not
:param kernel: kernel deb name to install provided in localpath
:rtype Tuple
:return: connector <Connector>,
vm_ips <VM private IPs dict>,
device <attached disk devices>,
ssh_client <ssh clients dict>
"""
connector = None
device = None
vms = {}
vm_ips = {}
ssh_client = {}
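# Provision the requested number of VMs on the selected provider, then attach data disks
# according to test_type; on failure the provider resources are torn down below.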
try:
if provider == constants.AWS:
connector = AWSConnector(keyid=keyid, secret=secret, imageid=imageid,
instancetype=instancetype, user=user, localpath=localpath,
region=region, zone=zone)
connector.connect()
for i in xrange(1, vm_count + 1):
vms[i] = connector.create_vm()
for i in xrange(1, vm_count + 1):
ssh_client[i] = connector.wait_for_ping(vms[i])
# SRIOV is enabled by default on AWS for the tested platforms
# if sriov == constants.ENABLED:
# ssh_client[i] = connector.enable_sr_iov(vms[i], ssh_client[i])
vms[i].update()
vm_ips[i] = vms[i].private_ip_address
device = constants.DEVICE_AWS.replace('sd', 'xvd')
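# On Xen-based AWS instances, volumes attached as /dev/sdX typically show up in the guest as /dev/xvdX.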
if test_type == constants.VM_DISK:
if raid and type(raid) is int:
device = []
for i in xrange(raid):
dev = '/dev/sd{}'.format(chr(120 - i))
connector.attach_disk(vms[1], size=disk_size, iops=5000,
volume_type=connector.volume_type['ssd_io1'],
device=dev)
device.append(dev.replace('sd', 'xvd'))
time.sleep(3)
else:
connector.attach_disk(vms[1], size=disk_size, iops=5000,
volume_type=connector.volume_type['ssd_io1'],
device=constants.DEVICE_AWS)
elif test_type == constants.DB_DISK:
if raid and type(raid) is int:
device = []
for i in xrange(raid):
dev = '/dev/sd{}'.format(chr(120 - i))
connector.attach_disk(vms[2], size=disk_size, iops=5000,
volume_type=connector.volume_type['ssd_io1'],
device=dev)
device.append(dev.replace('sd', 'xvd'))
time.sleep(3)
else:
connector.attach_disk(vms[2], size=disk_size, iops=5000,
volume_type=connector.volume_type['ssd_io1'],
device=constants.DEVICE_AWS)
elif test_type == constants.CLUSTER_DISK:
connector.attach_disk(vms[1], size=disk_size + 200, iops=5000,
volume_type=connector.volume_type['ssd_io1'],
device=constants.DEVICE_AWS)
for i in xrange(2, vm_count + 1):
connector.attach_disk(vms[i], size=disk_size, iops=5000,
volume_type=connector.volume_type['ssd_io1'],
device=constants.DEVICE_AWS)
time.sleep(3)
elif provider == constants.AZURE:
connector = AzureConnector(clientid=keyid, secret=secret,
subscriptionid=subscriptionid, tenantid=tenantid,
imageid=imageid, instancetype=instancetype, user=user,
localpath=localpath, location=region, sriov=sriov)
connector.connect()
for i in xrange(1, vm_count + 1):
vms[i] = connector.create_vm()
device = constants.DEVICE_AZURE
if test_type == constants.VM_DISK:
if raid and type(raid) is int:
device = []
for i in xrange(raid):
log.info('Created disk: {}'.format(connector.attach_disk(vms[1], disk_size,
lun=i)))
device.append('/dev/sd{}'.format(chr(99 + i)))
else:
connector.attach_disk(vms[1], disk_size)
elif test_type == constants.DB_DISK:
if raid and type(raid) is int:
device = []
for i in xrange(raid):
log.info('Created disk: {}'.format(connector.attach_disk(vms[2], disk_size,
lun=i)))
device.append('/dev/sd{}'.format(chr(99 + i)))
else:
connector.attach_disk(vms[2], disk_size)
elif test_type == constants.CLUSTER_DISK:
log.info('Created disk: {}'.format(connector.attach_disk(vms[1], disk_size + 200)))
for i in xrange(2, vm_count + 1):
log.info('Created disk: {}'.format(connector.attach_disk(vms[i], disk_size)))
for i in xrange(1, vm_count + 1):
ssh_client[i] = SSHClient(server=vms[i].name + connector.dns_suffix,
host_key_file=connector.host_key_file,
user=connector.user,
ssh_key_file=os.path.join(connector.localpath,
connector.key_name + '.pem'))
cmd = ssh_client[i].run(
'ifconfig eth0 | grep "inet\ addr" | cut -d: -f2 | cut -d" " -f1')
vm_ips[i] = cmd[1].strip()
elif provider == constants.GCE:
connector = GCPConnector(clientid=keyid, secret=secret, token=token,
projectid=projectid, imageid=imageid,
instancetype=instancetype, user=user, localpath=localpath,
zone=zone)
connector.connect()
for i in xrange(1, vm_count + 1):
vms[i] = connector.create_vm()
for i in xrange(1, vm_count + 1):
ssh_client[i] = connector.wait_for_ping(vms[i])
vm_ips[i] = vms[i]['networkInterfaces'][0]['networkIP']
if test_type == constants.VM_DISK:
if raid and type(raid) is int:
device = []
for i in xrange(raid):
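                        # the GCE boot disk is /dev/sda, so extra persistent disks are expected
                        # at /dev/sdb, /dev/sdc, ... (chr(98) is 'b'); this relies on attach
                        # order - see the note in the non-RAID branch below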
disk_name = connector.attach_disk(vms[1]['name'], disk_size)
log.info('Created disk: {}'.format(disk_name))
device.append('/dev/sd{}'.format(chr(98 + i)))
# device.append(constants.DEVICE_GCE + disk_name)
else:
disk_name = connector.attach_disk(vms[1]['name'], disk_size)
log.info('Created disk: {}'.format(disk_name))
# Note: using disk device order prediction,
# as the GCE API is not consistent in the disk naming/detection scheme
# device = constants.DEVICE_GCE + disk_name
device = constants.TEMP_DEVICE_GCE
elif test_type == constants.DB_DISK:
if raid and type(raid) is int:
device = []
for i in xrange(raid):
disk_name = connector.attach_disk(vms[2]['name'], disk_size)
log.info('Created disk: {}'.format(disk_name))
device.append('/dev/sd{}'.format(chr(98 + i)))
# device.append(constants.DEVICE_GCE + disk_name)
else:
disk_name = connector.attach_disk(vms[2]['name'], disk_size)
log.info('Created disk: {}'.format(disk_name))
device = constants.TEMP_DEVICE_GCE
# device = constants.DEVICE_GCE + disk_name
elif test_type == constants.CLUSTER_DISK:
disk_name = connector.attach_disk(vms[1]['name'], disk_size + 200)
log.info('Created disk: {}'.format(disk_name))
for i in xrange(2, vm_count + 1):
disk_name = connector.attach_disk(vms[i]['name'], disk_size)
log.info('Created disk: {}'.format(disk_name))
device = constants.TEMP_DEVICE_GCE
# setup perf tuning parameters
current_path = os.path.dirname(os.path.realpath(__file__))
for i in range(1, vm_count + 1):
log.info('Running perf tuning on {}'.format(vm_ips[i]))
ssh_client[i].put_file(os.path.join(current_path, 'tests', 'perf_tuning.sh'),
'/tmp/perf_tuning.sh')
ssh_client[i].run('chmod +x /tmp/perf_tuning.sh')
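            # strip CR characters in case the script was checked out with Windows line endings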
ssh_client[i].run("sed -i 's/\r//' /tmp/perf_tuning.sh")
params = [provider]
if '.deb' in kernel:
log.info('Uploading kernel {} on {}'.format(kernel, vm_ips[i]))
ssh_client[i].put_file(os.path.join(localpath, kernel),
'/tmp/{}'.format(kernel))
params.append('/tmp/{}'.format(kernel))
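            # perf_tuning.sh receives the provider name and, optionally, the path of a custom
            # kernel package to install before the reboot below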
ssh_client[i].run('/tmp/perf_tuning.sh {}'.format(' '.join(params)))
            if provider in [constants.AWS, constants.GCE]:
ssh_client[i] = connector.restart_vm(vms[i])
elif provider == constants.AZURE:
vms[i] = connector.restart_vm(vms[i].name)
                # TODO: add custom kernel support for all providers - currently only Azure is supported
ssh_client[i] = SSHClient(server=vms[i].name + connector.dns_suffix,
host_key_file=connector.host_key_file,
user=connector.user,
ssh_key_file=os.path.join(connector.localpath,
connector.key_name + '.pem'))
cmd = ssh_client[i].run(
'ifconfig eth0 | grep "inet\ addr" | cut -d: -f2 | cut -d" " -f1')
vm_ips[i] = cmd[1].strip()
except Exception as e:
log.error(e)
if connector:
connector.teardown()
raise
return connector, vm_ips, device, ssh_client
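# Example invocation of the test entry points below, with illustrative placeholder values
# (credentials, image id, user and paths are hypothetical); note that test_orion also
# expects 'orion_linux_x86-64.gz' to be present in localpath:
# test_orion(provider='aws', keyid='<key-id>', secret='<secret>', token=None,
#            imageid='ami-00000000', subscription=None, tenant=None, projectid=None,
#            instancetype='d2.4xlarge', user='ubuntu', localpath='/path/to/results',
#            region='us-west-2', zone='us-west-2a', sriov=constants.ENABLED, kernel='')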
def test_orion(provider, keyid, secret, token, imageid, subscription, tenant, projectid,
instancetype, user, localpath, region, zone, sriov, kernel):
"""
Run Orion test.
    :param provider: Service provider to be used e.g. azure, aws, gce.
:param keyid: user key for executing remote connection
:param secret: user secret for executing remote connection
:param token: GCE refresh token obtained with gcloud sdk
:param subscription: Azure specific subscription id
:param tenant: Azure specific tenant id
:param projectid: GCE specific project id
:param imageid: AWS OS AMI image id or
Azure image references offer and sku: e.g. 'UbuntuServer#16.04.0-LTS'.
    :param instancetype: AWS instance resource type e.g. 'd2.4xlarge' or
Azure hardware profile vm size e.g. 'Standard_DS14_v2'.
:param user: remote ssh user for the instance
:param localpath: localpath where the logs should be downloaded, and the
default path for other necessary tools
:param region: EC2 region to connect to
:param zone: EC2 zone where other resources should be available
:param sriov: Enable or disable SR-IOV
:param kernel: custom kernel name provided in localpath
"""
disk_size = 0
if provider == constants.AWS:
disk_size = 100
elif provider == constants.AZURE:
disk_size = 513
elif provider == constants.GCE:
        # pd-ssd provides 30 IOPS/GB => 167 GB ~= 5010 IOPS
disk_size = 167
connector, vm_ips, device, ssh_client = setup_env(provider=provider, vm_count=1,
test_type=constants.VM_DISK,
disk_size=disk_size, raid=False,
keyid=keyid, secret=secret,
token=token, subscriptionid=subscription,
tenantid=tenant, projectid=projectid,
imageid=imageid, instancetype=instancetype,
user=user, localpath=localpath,
region=region, zone=zone, sriov=sriov,
kernel=kernel)
results_path = None
try:
if all(client for client in ssh_client.values()):
current_path = os.path.dirname(os.path.realpath(__file__))
ssh_client[1].put_file(os.path.join(localpath, 'orion_linux_x86-64.gz'),
'/tmp/orion_linux_x86-64.gz')
ssh_client[1].put_file(os.path.join(current_path, 'tests', 'run_orion.sh'),
'/tmp/run_orion.sh')
ssh_client[1].run('chmod +x /tmp/run_orion.sh')
ssh_client[1].run("sed -i 's/\r//' /tmp/run_orion.sh")
cmd = '/tmp/run_orion.sh {}'.format(device)
log.info('Running command {}'.format(cmd))
ssh_client[1].run(cmd, timeout=constants.TIMEOUT * 4)
results_path = os.path.join(localpath, 'orion{}_{}.zip'.format(str(time.time()),
instancetype))
ssh_client[1].get_file('/tmp/orion.zip', results_path)
except Exception as e:
log.error(e)
raise
finally:
if connector:
connector.teardown()
if results_path:
upload_results(localpath=localpath, table_name='Perf_{}_Orion'.format(provider),
results_path=results_path, parser=OrionLogsReader,
test_case_name='{}_Orion_perf_tuned'.format(provider),
host_type=shortcut.host_type(provider), instance_size=instancetype,
disk_setup='1 x SSD {}GB'.format(disk_size))
def test_orion_raid(provider, keyid, secret, token, imageid, subscription, tenant, projectid,
instancetype, user, localpath, region, zone, sriov, kernel):
"""
    Run Orion test using 10 x SSD devices in RAID 0.
    :param provider: Service provider to be used e.g. azure, aws, gce.
:param keyid: user key for executing remote connection
:param secret: user secret for executing remote connection
:param token: GCE refresh token obtained with gcloud sdk
:param subscription: Azure specific subscription id
:param tenant: Azure specific tenant id
:param projectid: GCE specific project id
:param imageid: AWS OS AMI image id or
Azure image references offer and sku: e.g. 'UbuntuServer#16.04.0-LTS'.
    :param instancetype: AWS instance resource type e.g. 'd2.4xlarge' or
Azure hardware profile vm size e.g. 'Standard_DS14_v2'.
:param user: remote ssh user for the instance
:param localpath: localpath where the logs should be downloaded, and the
default path for other necessary tools
:param region: EC2 region to connect to
:param zone: EC2 zone where other resources should be available
:param sriov: Enable or disable SR-IOV
:param kernel: custom kernel name provided in localpath
"""
disk_size = 0
raid = 10
if provider == constants.AWS:
disk_size = 100
elif provider == constants.AZURE:
disk_size = 513
elif provider == constants.GCE:
disk_size = 167
connector, vm_ips, device, ssh_client = setup_env(provider=provider, vm_count=1,
test_type=constants.VM_DISK,
disk_size=disk_size, raid=raid, keyid=keyid,
secret=secret, token=token,
subscriptionid=subscription, tenantid=tenant,
projectid=projectid, imageid=imageid,
instancetype=instancetype, user=user,
localpath=localpath, region=region,
zone=zone, sriov=sriov, kernel=kernel)
results_path = None
try:
if all(client for client in ssh_client.values()):
current_path = os.path.dirname(os.path.realpath(__file__))
ssh_client[1].put_file(os.path.join(localpath, 'orion_linux_x86-64.gz'),
'/tmp/orion_linux_x86-64.gz')
ssh_client[1].put_file(os.path.join(current_path, 'tests', 'run_orion.sh'),
'/tmp/run_orion.sh')
ssh_client[1].run('chmod +x /tmp/run_orion.sh')
ssh_client[1].run("sed -i 's/\r//' /tmp/run_orion.sh")
cmd = '/tmp/run_orion.sh {}'.format(' '.join(device))
log.info('Running command {}'.format(cmd))
ssh_client[1].run(cmd, timeout=constants.TIMEOUT * 4)
results_path = os.path.join(localpath, 'orion{}_{}.zip'.format(str(time.time()),
instancetype))
ssh_client[1].get_file('/tmp/orion.zip', results_path)
except Exception as e:
log.error(e)
raise
finally:
if connector:
connector.teardown()
if results_path:
upload_results(localpath=localpath, table_name='Perf_{}_Orion'.format(provider),
results_path=results_path, parser=OrionLogsReader,
test_case_name='{}_Orion_perf_tuned'.format(provider),
host_type=shortcut.host_type(provider), instance_size=instancetype,
disk_setup='{} x SSD {}GB'.format(raid, disk_size))
def test_sysbench(provider, keyid, secret, token, imageid, subscription, tenant, projectid,
instancetype, user, localpath, region, zone, sriov, kernel):
"""
Run Sysbench test.
    :param provider: Service provider to be used e.g. azure, aws, gce.
:param keyid: user key for executing remote connection
:param secret: user secret for executing remote connection
:param token: GCE refresh token obtained with gcloud sdk
:param subscription: Azure specific subscription id
:param tenant: Azure specific tenant id
:param projectid: GCE specific project id
:param imageid: AWS OS AMI image id or
Azure image references offer and sku: e.g. 'UbuntuServer#16.04.0-LTS'.
    :param instancetype: AWS instance resource type e.g. 'd2.4xlarge' or
Azure hardware profile vm size e.g. 'Standard_DS14_v2'.
:param user: remote ssh user for the instance
:param localpath: localpath where the logs should be downloaded, and the
default path for other necessary tools
:param region: EC2 region to connect to
:param zone: EC2 zone where other resources should be available
:param sriov: Enable or disable SR-IOV
:param kernel: custom kernel name provided in localpath
"""
disk_size = 0
if provider == constants.AWS:
disk_size = 100
elif provider == constants.AZURE:
disk_size = 513
elif provider == constants.GCE:
disk_size = 167
connector, vm_ips, device, ssh_client = setup_env(provider=provider, vm_count=1,
test_type=constants.VM_DISK,
disk_size=disk_size, raid=False,
keyid=keyid, secret=secret,
token=token, subscriptionid=subscription,
tenantid=tenant, projectid=projectid,
imageid=imageid, instancetype=instancetype,
user=user, localpath=localpath,
region=region, zone=zone, sriov=sriov,
kernel=kernel)
results_path = None
try:
if all(client for client in ssh_client.values()):
current_path = os.path.dirname(os.path.realpath(__file__))
ssh_client[1].put_file(os.path.join(current_path, 'tests', 'run_sysbench.sh'),
'/tmp/run_sysbench.sh')
ssh_client[1].run('chmod +x /tmp/run_sysbench.sh')
ssh_client[1].run("sed -i 's/\r//' /tmp/run_sysbench.sh")
cmd = '/tmp/run_sysbench.sh {}'.format(device)
log.info('Running command {}'.format(cmd))
ssh_client[1].run(cmd, timeout=constants.TIMEOUT * 5)
results_path = os.path.join(localpath, 'sysbench{}_{}.zip'.format(str(time.time()),
instancetype))
ssh_client[1].get_file('/tmp/sysbench.zip', results_path)
except Exception as e:
log.error(e)
raise
finally:
if connector:
connector.teardown()
if results_path:
upload_results(localpath=localpath, table_name='Perf_{}_Sysbench'.format(provider),
results_path=results_path, parser=SysbenchLogsReader,
test_case_name='{}_sysbench_fileio_perf_tuned'.format(provider),
host_type=shortcut.host_type(provider), instance_size=instancetype,
disk_setup='1 x SSD {}GB'.format(disk_size))
def test_sysbench_raid(provider, keyid, secret, token, imageid, subscription, tenant, projectid,
instancetype, user, localpath, region, zone, sriov, kernel):
"""
    Run Sysbench test using 10 x SSD devices in RAID 0.
    :param provider: Service provider to be used e.g. azure, aws, gce.
:param keyid: user key for executing remote connection
:param secret: user secret for executing remote connection
:param token: GCE refresh token obtained with gcloud sdk
:param subscription: Azure specific subscription id
:param tenant: Azure specific tenant id
:param projectid: GCE specific project id
:param imageid: AWS OS AMI image id or
Azure image references offer and sku: e.g. 'UbuntuServer#16.04.0-LTS'.
    :param instancetype: AWS instance resource type e.g. 'd2.4xlarge' or
Azure hardware profile vm size e.g. 'Standard_DS14_v2'.
:param user: remote ssh user for the instance
:param localpath: localpath where the logs should be downloaded, and the
default path for other necessary tools
:param region: EC2 region to connect to
:param zone: EC2 zone where other resources should be available
:param sriov: Enable or disable SR-IOV
:param kernel: custom kernel name provided in localpath
"""
disk_size = 0
raid = 10
if provider == constants.AWS:
disk_size = 100
elif provider == constants.AZURE:
disk_size = 513
elif provider == constants.GCE:
disk_size = 167
connector, vm_ips, device, ssh_client = setup_env(provider=provider, vm_count=1,
test_type=constants.VM_DISK,
disk_size=disk_size, raid=raid, keyid=keyid,
secret=secret, token=token,
subscriptionid=subscription, tenantid=tenant,
projectid=projectid, imageid=imageid,
instancetype=instancetype, user=user,
localpath=localpath, region=region,
zone=zone, sriov=sriov, kernel=kernel)
results_path = None
try:
if all(client for client in ssh_client.values()):
current_path = os.path.dirname(os.path.realpath(__file__))
ssh_client[1].put_file(os.path.join(current_path, 'tests', 'raid.sh'), '/tmp/raid.sh')
ssh_client[1].run('chmod +x /tmp/raid.sh')
ssh_client[1].run("sed -i 's/\r//' /tmp/raid.sh")
ssh_client[1].run('/tmp/raid.sh 0 {} {}'.format(raid, ' '.join(device)))
ssh_client[1].put_file(os.path.join(current_path, 'tests', 'run_sysbench.sh'),
'/tmp/run_sysbench.sh')
ssh_client[1].run('chmod +x /tmp/run_sysbench.sh')
ssh_client[1].run("sed -i 's/\r//' /tmp/run_sysbench.sh")
cmd = '/tmp/run_sysbench.sh {}'.format(constants.RAID_DEV)
log.info('Running command {}'.format(cmd))
ssh_client[1].run(cmd, timeout=constants.TIMEOUT * 5)
results_path = os.path.join(localpath, 'sysbench{}_{}.zip'.format(str(time.time()),
instancetype))
ssh_client[1].get_file('/tmp/sysbench.zip', results_path)
except Exception as e:
log.error(e)
raise
finally:
if connector:
connector.teardown()
if results_path:
upload_results(localpath=localpath, table_name='Perf_{}_Sysbench'.format(provider),
results_path=results_path, parser=SysbenchLogsReader,
test_case_name='{}_sysbench_fileio_perf_tuned'.format(provider),
host_type=shortcut.host_type(provider), instance_size=instancetype,
disk_setup='{} x SSD {}GB RAID0'.format(raid, disk_size))
def test_memcached(provider, keyid, secret, token, imageid, subscription, tenant, projectid,
instancetype, user, localpath, region, zone, sriov, kernel):
"""
Run memcached test on 2 instances.
    :param provider: Service provider to be used e.g. azure, aws, gce.
:param keyid: user key for executing remote connection
:param secret: user secret for executing remote connection
:param token: GCE refresh token obtained with gcloud sdk
:param subscription: Azure specific subscription id
:param tenant: Azure specific tenant id
:param projectid: GCE specific project id
:param imageid: AWS OS AMI image id or
Azure image references offer and sku: e.g. 'UbuntuServer#16.04.0-LTS'.
    :param instancetype: AWS instance resource type e.g. 'd2.4xlarge' or
Azure hardware profile vm size e.g. 'Standard_DS14_v2'.
:param user: remote ssh user for the instance
:param localpath: localpath where the logs should be downloaded, and the
default path for other necessary tools
:param region: EC2 region to connect to
:param zone: EC2 zone where other resources should be available
:param sriov: Enable or disable SR-IOV
:param kernel: custom kernel name provided in localpath
"""
connector, vm_ips, device, ssh_client = setup_env(provider=provider, vm_count=2,
test_type=None, disk_size=None, raid=False,
keyid=keyid, secret=secret, token=token,
subscriptionid=subscription, tenantid=tenant,
projectid=projectid, imageid=imageid,
instancetype=instancetype, user=user,
localpath=localpath, region=region,
zone=zone, sriov=sriov, kernel=kernel)
results_path = None
try:
if all(client for client in ssh_client.values()):
# enable key auth between instances
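            # (the first VM drives the benchmark against the second VM over passwordless SSH)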
ssh_client[1].put_file(os.path.join(localpath, connector.key_name + '.pem'),
'/home/{}/.ssh/id_rsa'.format(user))
ssh_client[1].run('chmod 0600 /home/{0}/.ssh/id_rsa'.format(user))
current_path = os.path.dirname(os.path.realpath(__file__))
ssh_client[1].put_file(os.path.join(current_path, 'tests', 'run_memcached.sh'),
'/tmp/run_memcached.sh')
ssh_client[1].run('chmod +x /tmp/run_memcached.sh')
ssh_client[1].run("sed -i 's/\r//' /tmp/run_memcached.sh")
cmd = '/tmp/run_memcached.sh {} {}'.format(vm_ips[2], user)
log.info('Running command {}'.format(cmd))
ssh_client[1].run(cmd, timeout=constants.TIMEOUT)
results_path = os.path.join(localpath, 'memcached{}_{}.zip'.format(str(time.time()),
instancetype))
ssh_client[1].get_file('/tmp/memcached.zip', results_path)
except Exception as e:
log.error(e)
raise
finally:
if connector:
connector.teardown()
if results_path:
upload_results(localpath=localpath, table_name='Perf_{}_Memcached'.format(provider),
results_path=results_path, parser=MemcachedLogsReader,
test_case_name='{}_memcached_perf_tuned'.format(provider),
data_path=shortcut.data_path(sriov), host_type=shortcut.host_type(provider),
instance_size=instancetype)
def test_redis(provider, keyid, secret, token, imageid, subscription, tenant, projectid,
instancetype, user, localpath, region, zone, sriov, kernel):
"""
Run redis test on 2 instances.
    :param provider: Service provider to be used e.g. azure, aws, gce.
:param keyid: user key for executing remote connection
:param secret: user secret for executing remote connection
:param token: GCE refresh token obtained with gcloud sdk
:param subscription: Azure specific subscription id
:param tenant: Azure specific tenant id
:param projectid: GCE specific project id
:param imageid: AWS OS AMI image id or
Azure image references offer and sku: e.g. 'UbuntuServer#16.04.0-LTS'.
    :param instancetype: AWS instance resource type e.g. 'd2.4xlarge' or
Azure hardware profile vm size e.g. 'Standard_DS14_v2'.
:param user: remote ssh user for the instance
:param localpath: localpath where the logs should be downloaded, and the
default path for other necessary tools
:param region: EC2 region to connect to
:param zone: EC2 zone where other resources should be available
:param sriov: Enable or disable SR-IOV
:param kernel: custom kernel name provided in localpath
"""
connector, vm_ips, device, ssh_client = setup_env(provider=provider, vm_count=2,
test_type=None, disk_size=None, raid=False,
keyid=keyid, secret=secret, token=token,
subscriptionid=subscription, tenantid=tenant,
projectid=projectid, imageid=imageid,
instancetype=instancetype, user=user,
localpath=localpath, region=region,
zone=zone, sriov=sriov, kernel=kernel)
results_path = None
try:
if all(client for client in ssh_client.values()):
# enable key auth between instances
ssh_client[1].put_file(os.path.join(localpath, connector.key_name + '.pem'),
'/home/{}/.ssh/id_rsa'.format(user))
ssh_client[1].run('chmod 0600 /home/{0}/.ssh/id_rsa'.format(user))
current_path = os.path.dirname(os.path.realpath(__file__))
ssh_client[1].put_file(os.path.join(current_path, 'tests', 'run_redis.sh'),
'/tmp/run_redis.sh')
ssh_client[1].run('chmod +x /tmp/run_redis.sh')
ssh_client[1].run("sed -i 's/\r//' /tmp/run_redis.sh")
cmd = '/tmp/run_redis.sh {} {}'.format(vm_ips[2], user)
log.info('Running command {}'.format(cmd))
ssh_client[1].run(cmd, timeout=constants.TIMEOUT)
results_path = os.path.join(localpath, 'redis{}_{}.zip'.format(str(time.time()),
instancetype))
ssh_client[1].get_file('/tmp/redis.zip', results_path)
except Exception as e:
log.error(e)
raise
finally:
if connector:
connector.teardown()
if results_path:
upload_results(localpath=localpath, table_name='Perf_{}_Redis'.format(provider),
results_path=results_path, parser=RedisLogsReader,
test_case_name='{}_redis_perf_tuned'.format(provider),
data_path=shortcut.data_path(sriov), host_type=shortcut.host_type(provider),
instance_size=instancetype)
def test_apache_bench(provider, keyid, secret, token, imageid, subscription, tenant, projectid,
instancetype, user, localpath, region, zone, sriov, kernel):
"""
    Run Apache Benchmark against an Apache web server.
    :param provider: Service provider to be used e.g. azure, aws, gce.
:param keyid: user key for executing remote connection
:param secret: user secret for executing remote connection
:param token: GCE refresh token obtained with gcloud sdk
:param subscription: Azure specific subscription id
:param tenant: Azure specific tenant id
:param projectid: GCE specific project id
:param imageid: AWS OS AMI image id or
Azure image references offer and sku: e.g. 'UbuntuServer#16.04.0-LTS'.
    :param instancetype: AWS instance resource type e.g. 'd2.4xlarge' or
Azure hardware profile vm size e.g. 'Standard_DS14_v2'.
:param user: remote ssh user for the instance
:param localpath: localpath where the logs should be downloaded, and the
default path for other necessary tools
:param region: EC2 region to connect to
:param zone: EC2 zone where other resources should be available
:param sriov: Enable or disable SR-IOV
:param kernel: custom kernel name provided in localpath
"""
vm_count = 2
connector, vm_ips, device, ssh_client = setup_env(provider=provider, vm_count=vm_count,
test_type=None, disk_size=None, raid=False,
keyid=keyid, secret=secret, token=token,
subscriptionid=subscription, tenantid=tenant,
projectid=projectid, imageid=imageid,
instancetype=instancetype, user=user,
localpath=localpath, region=region,
zone=zone, sriov=sriov, kernel=kernel)
results_path = None
try:
if all(client for client in ssh_client.values()):
# enable key auth between instances
ssh_client[1].put_file(os.path.join(localpath, connector.key_name + '.pem'),
'/home/{}/.ssh/id_rsa'.format(user))
ssh_client[1].run('chmod 0600 /home/{0}/.ssh/id_rsa'.format(user))
current_path = os.path.dirname(os.path.realpath(__file__))
ssh_client[1].put_file(os.path.join(current_path, 'tests', 'run_apache_bench.sh'),
'/tmp/run_apache_bench.sh')
ssh_client[1].run('chmod +x /tmp/run_apache_bench.sh')
ssh_client[1].run("sed -i 's/\r//' /tmp/run_apache_bench.sh")
cmd = '/tmp/run_apache_bench.sh {} {}'.format(vm_ips[2], user)
log.info('Running command {}'.format(cmd))
ssh_client[1].run(cmd, timeout=constants.TIMEOUT)
results_path = os.path.join(localpath, 'apache_bench{}_{}.zip'.format(str(time.time()),
instancetype))
ssh_client[1].get_file('/tmp/apache_bench.zip', results_path)
except Exception as e:
log.error(e)
raise
finally:
if connector:
connector.teardown()
if results_path:
upload_results(localpath=localpath, table_name='Perf_{}_Apache'.format(provider),
results_path=results_path, parser=ApacheLogsReader,
test_case_name='{}_Apache_bench_perf_tuned'.format(provider),
data_path=shortcut.data_path(sriov), host_type=shortcut.host_type(provider),
instance_size=instancetype)
def test_nginx_bench(provider, keyid, secret, token, imageid, subscription, tenant, projectid,
instancetype, user, localpath, region, zone, sriov, kernel):
"""
    Run Apache Benchmark against an Nginx web server.
    :param provider: Service provider to be used e.g. azure, aws, gce.
:param keyid: user key for executing remote connection
:param secret: user secret for executing remote connection
:param token: GCE refresh token obtained with gcloud sdk
:param subscription: Azure specific subscription id
:param tenant: Azure specific tenant id
:param projectid: GCE specific project id
:param imageid: AWS OS AMI image id or
Azure image references offer and sku: e.g. 'UbuntuServer#16.04.0-LTS'.
    :param instancetype: AWS instance resource type e.g. 'd2.4xlarge' or
Azure hardware profile vm size e.g. 'Standard_DS14_v2'.
:param user: remote ssh user for the instance
:param localpath: localpath where the logs should be downloaded, and the
default path for other necessary tools
:param region: EC2 region to connect to
:param zone: EC2 zone where other resources should be available
:param sriov: Enable or disable SR-IOV
:param kernel: custom kernel name provided in localpath
"""
vm_count = 2
connector, vm_ips, device, ssh_client = setup_env(provider=provider, vm_count=vm_count,
test_type=None, disk_size=None, raid=False,
keyid=keyid, secret=secret, token=token,
subscriptionid=subscription, tenantid=tenant,
projectid=projectid, imageid=imageid,
instancetype=instancetype, user=user,
localpath=localpath, region=region,
zone=zone, sriov=sriov, kernel=kernel)
results_path = None
try:
if all(client for client in ssh_client.values()):
# enable key auth between instances
ssh_client[1].put_file(os.path.join(localpath, connector.key_name + '.pem'),
'/home/{}/.ssh/id_rsa'.format(user))
ssh_client[1].run('chmod 0600 /home/{0}/.ssh/id_rsa'.format(user))
current_path = os.path.dirname(os.path.realpath(__file__))
ssh_client[1].put_file(os.path.join(current_path, 'tests', 'run_nginx_bench.sh'),
'/tmp/run_nginx_bench.sh')
ssh_client[1].run('chmod +x /tmp/run_nginx_bench.sh')
ssh_client[1].run("sed -i 's/\r//' /tmp/run_nginx_bench.sh")
cmd = '/tmp/run_nginx_bench.sh {} {}'.format(vm_ips[2], user)
log.info('Running command {}'.format(cmd))
ssh_client[1].run(cmd, timeout=constants.TIMEOUT)
results_path = os.path.join(localpath, 'nginx_bench{}_{}.zip'.format(str(time.time()),
instancetype))
ssh_client[1].get_file('/tmp/nginx_bench.zip', results_path)
except Exception as e:
log.error(e)
raise
finally:
if connector:
connector.teardown()
if results_path:
upload_results(localpath=localpath, table_name='Perf_{}_Nginx'.format(provider),
results_path=results_path, parser=ApacheLogsReader,
test_case_name='{}_Apache_bench_perf_tuned'.format(provider),
data_path=shortcut.data_path(sriov), host_type=shortcut.host_type(provider),
instance_size=instancetype)
def test_mariadb(provider, keyid, secret, token, imageid, subscription, tenant, projectid,
instancetype, user, localpath, region, zone, sriov, kernel):
"""
Run MariaDB test on 2 instances.
    :param provider: Service provider to be used e.g. azure, aws, gce.
:param keyid: user key for executing remote connection
:param secret: user secret for executing remote connection
:param token: GCE refresh token obtained with gcloud sdk
:param subscription: Azure specific subscription id
:param tenant: Azure specific tenant id
:param projectid: GCE specific project id
:param imageid: AWS OS AMI image id or
Azure image references offer and sku: e.g. 'UbuntuServer#16.04.0-LTS'.
    :param instancetype: AWS instance resource type e.g. 'd2.4xlarge' or
Azure hardware profile vm size e.g. 'Standard_DS14_v2'.
:param user: remote ssh user for the instance
:param localpath: localpath where the logs should be downloaded, and the
default path for other necessary tools
:param region: EC2 region to connect to
:param zone: EC2 zone where other resources should be available
:param sriov: Enable or disable SR-IOV
:param kernel: custom kernel name provided in localpath
"""
disk_size = 0
if provider == constants.AWS:
disk_size = 100
elif provider == constants.AZURE:
disk_size = 513
elif provider == constants.GCE:
disk_size = 167
connector, vm_ips, device, ssh_client = setup_env(provider=provider, vm_count=2,
test_type=constants.DB_DISK,
disk_size=disk_size, raid=False, keyid=keyid,
secret=secret, token=token,
subscriptionid=subscription, tenantid=tenant,
projectid=projectid, imageid=imageid,
instancetype=instancetype, user=user,
localpath=localpath, region=region,
zone=zone, sriov=sriov, kernel=kernel)
results_path = None
try:
if all(client for client in ssh_client.values()):
# enable key auth between instances
ssh_client[1].put_file(os.path.join(localpath, connector.key_name + '.pem'),
'/home/{}/.ssh/id_rsa'.format(user))
ssh_client[1].run('chmod 0600 /home/{0}/.ssh/id_rsa'.format(user))
current_path = os.path.dirname(os.path.realpath(__file__))
ssh_client[1].put_file(os.path.join(current_path, 'tests', 'run_mariadb.sh'),
'/tmp/run_mariadb.sh')
ssh_client[1].run('chmod +x /tmp/run_mariadb.sh')
ssh_client[1].run("sed -i 's/\r//' /tmp/run_mariadb.sh")
cmd = '/tmp/run_mariadb.sh {} {} {}'.format(vm_ips[2], user, device)
log.info('Running command {}'.format(cmd))
ssh_client[1].run(cmd, timeout=constants.TIMEOUT)
results_path = os.path.join(localpath, 'mariadb{}_{}.zip'.format(str(time.time()),
instancetype))
ssh_client[1].get_file('/tmp/mariadb.zip', results_path)
except Exception as e:
log.error(e)
raise
finally:
if connector:
connector.teardown()
if results_path:
upload_results(localpath=localpath, table_name='Perf_{}_MariaDB'.format(provider),
results_path=results_path, parser=MariadbLogsReader,
test_case_name='{}_MariaDB_perf_tuned'.format(provider),
data_path=shortcut.data_path(sriov), host_type=shortcut.host_type(provider),
instance_size=instancetype, disk_setup='1 x SSD {}GB'.format(disk_size))
def test_mariadb_raid(provider, keyid, secret, token, imageid, subscription, tenant, projectid,
instancetype, user, localpath, region, zone, sriov, kernel):
"""
    Run MariaDB test on 2 instances using 10 x SSD devices in RAID 0.
    :param provider: Service provider to be used e.g. azure, aws, gce.
:param keyid: user key for executing remote connection
:param secret: user secret for executing remote connection
:param token: GCE refresh token obtained with gcloud sdk
:param subscription: Azure specific subscription id
:param tenant: Azure specific tenant id
:param projectid: GCE specific project id
:param imageid: AWS OS AMI image id or
Azure image references offer and sku: e.g. 'UbuntuServer#16.04.0-LTS'.
    :param instancetype: AWS instance resource type e.g. 'd2.4xlarge' or
Azure hardware profile vm size e.g. 'Standard_DS14_v2'.
:param user: remote ssh user for the instance
:param localpath: localpath where the logs should be downloaded, and the
default path for other necessary tools
:param region: EC2 region to connect to
:param zone: EC2 zone where other resources should be available
:param sriov: Enable or disable SR-IOV
:param kernel: custom kernel name provided in localpath
"""
disk_size = 0
raid = 10
if provider == constants.AWS:
disk_size = 100
elif provider == constants.AZURE:
disk_size = 513
elif provider == constants.GCE:
disk_size = 167
connector, vm_ips, device, ssh_client = setup_env(provider=provider, vm_count=2,
test_type=constants.DB_DISK,
disk_size=disk_size, raid=raid, keyid=keyid,
secret=secret, token=token,
subscriptionid=subscription, tenantid=tenant,
projectid=projectid, imageid=imageid,
instancetype=instancetype, user=user,
localpath=localpath, region=region,
zone=zone, sriov=sriov, kernel=kernel)
results_path = None
try:
if all(client for client in ssh_client.values()):
# enable key auth between instances
ssh_client[1].put_file(os.path.join(localpath, connector.key_name + '.pem'),
'/home/{}/.ssh/id_rsa'.format(user))
ssh_client[1].run('chmod 0600 /home/{0}/.ssh/id_rsa'.format(user))
current_path = os.path.dirname(os.path.realpath(__file__))
ssh_client[2].put_file(os.path.join(current_path, 'tests', 'raid.sh'), '/tmp/raid.sh')
ssh_client[2].run('chmod +x /tmp/raid.sh')
ssh_client[2].run("sed -i 's/\r//' /tmp/raid.sh")
ssh_client[2].run('/tmp/raid.sh 0 {} {}'.format(raid, ' '.join(device)))
ssh_client[1].put_file(os.path.join(current_path, 'tests', 'run_mariadb.sh'),
'/tmp/run_mariadb.sh')
ssh_client[1].run('chmod +x /tmp/run_mariadb.sh')
ssh_client[1].run("sed -i 's/\r//' /tmp/run_mariadb.sh")
cmd = '/tmp/run_mariadb.sh {} {} {}'.format(vm_ips[2], user, constants.RAID_DEV)
log.info('Running command {}'.format(cmd))
ssh_client[1].run(cmd, timeout=constants.TIMEOUT)
results_path = os.path.join(localpath, 'mariadb{}_{}.zip'.format(str(time.time()),
instancetype))
ssh_client[1].get_file('/tmp/mariadb.zip', results_path)
except Exception as e:
log.error(e)
raise
finally:
if connector:
connector.teardown()
if results_path:
upload_results(localpath=localpath, table_name='Perf_{}_MariaDB'.format(provider),
results_path=results_path, parser=MariadbLogsReader,
test_case_name='{}_MariaDB_perf_tuned'.format(provider),
data_path=shortcut.data_path(sriov), host_type=shortcut.host_type(provider),
instance_size=instancetype,
disk_setup='{} x SSD {}GB RAID0'.format(raid, disk_size))
def test_mongodb(provider, keyid, secret, token, imageid, subscription, tenant, projectid,
instancetype, user, localpath, region, zone, sriov, kernel):
"""
    Run MongoDB YCSB benchmark test on 2 instances.
    :param provider: Service provider to be used e.g. azure, aws, gce.
:param keyid: user key for executing remote connection
:param secret: user secret for executing remote connection
:param token: GCE refresh token obtained with gcloud sdk
:param subscription: Azure specific subscription id
:param tenant: Azure specific tenant id
:param projectid: GCE specific project id
:param imageid: AWS OS AMI image id or
Azure image references offer and sku: e.g. 'UbuntuServer#16.04.0-LTS'.
    :param instancetype: AWS instance resource type e.g. 'd2.4xlarge' or
Azure hardware profile vm size e.g. 'Standard_DS14_v2'.
:param user: remote ssh user for the instance
:param localpath: localpath where the logs should be downloaded, and the
default path for other necessary tools
:param region: EC2 region to connect to
:param zone: EC2 zone where other resources should be available
:param sriov: Enable or disable SR-IOV
:param kernel: custom kernel name provided in localpath
"""
disk_size = 0
if provider == constants.AWS:
disk_size = 100
elif provider == constants.AZURE:
disk_size = 513
elif provider == constants.GCE:
disk_size = 167
connector, vm_ips, device, ssh_client = setup_env(provider=provider, vm_count=2,
test_type=constants.DB_DISK,
disk_size=disk_size, raid=False, keyid=keyid,
secret=secret, token=token,
subscriptionid=subscription, tenantid=tenant,
projectid=projectid, imageid=imageid,
instancetype=instancetype, user=user,
localpath=localpath, region=region,
zone=zone, sriov=sriov, kernel=kernel)
results_path = None
try:
if all(client for client in ssh_client.values()):
# enable key auth between instances
ssh_client[1].put_file(os.path.join(localpath, connector.key_name + '.pem'),
'/home/{}/.ssh/id_rsa'.format(user))
ssh_client[1].run('chmod 0600 /home/{0}/.ssh/id_rsa'.format(user))
current_path = os.path.dirname(os.path.realpath(__file__))
ssh_client[1].put_file(os.path.join(current_path, 'tests', 'run_mongodb.sh'),
'/tmp/run_mongodb.sh')
ssh_client[1].run('chmod +x /tmp/run_mongodb.sh')
ssh_client[1].run("sed -i 's/\r//' /tmp/run_mongodb.sh")
cmd = '/tmp/run_mongodb.sh {} {} {}'.format(vm_ips[2], user, device)
log.info('Running command {}'.format(cmd))
ssh_client[1].run(cmd, timeout=constants.TIMEOUT)
results_path = os.path.join(localpath, 'mongodb{}_{}.zip'.format(str(time.time()),
instancetype))
ssh_client[1].get_file('/tmp/mongodb.zip', results_path)
except Exception as e:
log.error(e)
raise
finally:
if connector:
connector.teardown()
if results_path:
upload_results(localpath=localpath, table_name='Perf_{}_MongoDB'.format(provider),
results_path=results_path, parser=MongodbLogsReader,
test_case_name='{}_MongoDB_perf_tuned'.format(provider),
data_path=shortcut.data_path(sriov), host_type=shortcut.host_type(provider),
instance_size=instancetype, disk_setup='1 x SSD {}GB'.format(disk_size))
def test_mongodb_raid(provider, keyid, secret, token, imageid, subscription, tenant, projectid,
instancetype, user, localpath, region, zone, sriov, kernel):
"""
    Run MongoDB YCSB benchmark test on 2 instances using 10 x SSD devices in RAID 0.
    :param provider: Service provider to be used e.g. azure, aws, gce.
:param keyid: user key for executing remote connection
:param secret: user secret for executing remote connection
:param token: GCE refresh token obtained with gcloud sdk
:param subscription: Azure specific subscription id
:param tenant: Azure specific tenant id
:param projectid: GCE specific project id
:param imageid: AWS OS AMI image id or
Azure image references offer and sku: e.g. 'UbuntuServer#16.04.0-LTS'.
    :param instancetype: AWS instance resource type e.g. 'd2.4xlarge' or
Azure hardware profile vm size e.g. 'Standard_DS14_v2'.
:param user: remote ssh user for the instance
:param localpath: localpath where the logs should be downloaded, and the
default path for other necessary tools
:param region: EC2 region to connect to
:param zone: EC2 zone where other resources should be available
:param sriov: Enable or disable SR-IOV
:param kernel: custom kernel name provided in localpath
"""
disk_size = 0
raid = 10
if provider == constants.AWS:
disk_size = 100
elif provider == constants.AZURE:
disk_size = 513
elif provider == constants.GCE:
disk_size = 167
connector, vm_ips, device, ssh_client = setup_env(provider=provider, vm_count=2,
test_type=constants.DB_DISK,
disk_size=disk_size, raid=raid, keyid=keyid,
secret=secret, token=token,
subscriptionid=subscription, tenantid=tenant,
projectid=projectid, imageid=imageid,
instancetype=instancetype, user=user,
localpath=localpath, region=region,
zone=zone, sriov=sriov, kernel=kernel)
results_path = None
try:
if all(client for client in ssh_client.values()):
# enable key auth between instances
ssh_client[1].put_file(os.path.join(localpath, connector.key_name + '.pem'),
'/home/{}/.ssh/id_rsa'.format(user))
ssh_client[1].run('chmod 0600 /home/{0}/.ssh/id_rsa'.format(user))
current_path = os.path.dirname(os.path.realpath(__file__))
ssh_client[2].put_file(os.path.join(current_path, 'tests', 'raid.sh'), '/tmp/raid.sh')
ssh_client[2].run('chmod +x /tmp/raid.sh')
ssh_client[2].run("sed -i 's/\r//' /tmp/raid.sh")
ssh_client[2].run('/tmp/raid.sh 0 {} {}'.format(raid, ' '.join(device)))
ssh_client[1].put_file(os.path.join(current_path, 'tests', 'run_mongodb.sh'),
'/tmp/run_mongodb.sh')
ssh_client[1].run('chmod +x /tmp/run_mongodb.sh')
ssh_client[1].run("sed -i 's/\r//' /tmp/run_mongodb.sh")
cmd = '/tmp/run_mongodb.sh {} {} {}'.format(vm_ips[2], user, constants.RAID_DEV)
log.info('Running command {}'.format(cmd))
ssh_client[1].run(cmd, timeout=constants.TIMEOUT)
results_path = os.path.join(localpath, 'mongodb{}_{}.zip'.format(str(time.time()),
instancetype))
ssh_client[1].get_file('/tmp/mongodb.zip', results_path)
except Exception as e:
log.error(e)
raise
finally:
if connector:
connector.teardown()
if results_path:
upload_results(localpath=localpath, table_name='Perf_{}_MongoDB'.format(provider),
results_path=results_path, parser=MongodbLogsReader,
test_case_name='{}_MongoDB_perf_tuned'.format(provider),
data_path=shortcut.data_path(sriov), host_type=shortcut.host_type(provider),
instance_size=instancetype,
disk_setup='{} x SSD {}GB RAID0'.format(raid, disk_size))
def test_postgresql(provider, keyid, secret, token, imageid, subscription, tenant, projectid,
instancetype, user, localpath, region, zone, sriov, kernel):
"""
    Run pgbench benchmark on a PostgreSQL server with a dedicated client.
    :param provider: Service provider to be used e.g. azure, aws, gce.
:param keyid: user key for executing remote connection
:param secret: user secret for executing remote connection
:param token: GCE refresh token obtained with gcloud sdk
:param subscription: Azure specific subscription id
:param tenant: Azure specific tenant id
:param projectid: GCE specific project id
:param imageid: AWS OS AMI image id or
Azure image references offer and sku: e.g. 'UbuntuServer#16.04.0-LTS'.
    :param instancetype: AWS instance resource type e.g. 'd2.4xlarge' or
Azure hardware profile vm size e.g. 'Standard_DS14_v2'.
:param user: remote ssh user for the instance
:param localpath: localpath where the logs should be downloaded, and the
default path for other necessary tools
:param region: EC2 region to connect to
:param zone: EC2 zone where other resources should be available
:param sriov: Enable or disable SR-IOV
:param kernel: custom kernel name provided in localpath
"""
disk_size = 0
if provider == constants.AWS:
disk_size = 300
elif provider == constants.AZURE:
disk_size = 513
elif provider == constants.GCE:
disk_size = 167
connector, vm_ips, device, ssh_client = setup_env(provider=provider, vm_count=2,
test_type=constants.DB_DISK,
disk_size=disk_size, raid=False, keyid=keyid,
secret=secret, token=token,
subscriptionid=subscription, tenantid=tenant,
projectid=projectid, imageid=imageid,
instancetype=instancetype, user=user,
localpath=localpath, region=region,
zone=zone, sriov=sriov, kernel=kernel)
results_path = None
try:
if all(client for client in ssh_client.values()):
# enable key auth between instances
ssh_client[1].put_file(os.path.join(localpath, connector.key_name + '.pem'),
'/home/{}/.ssh/id_rsa'.format(user))
ssh_client[1].run('chmod 0600 /home/{0}/.ssh/id_rsa'.format(user))
current_path = os.path.dirname(os.path.realpath(__file__))
ssh_client[1].put_file(os.path.join(current_path, 'tests', 'run_postgresql.sh'),
'/tmp/run_postgresql.sh')
ssh_client[1].run('chmod +x /tmp/run_postgresql.sh')
ssh_client[1].run("sed -i 's/\r//' /tmp/run_postgresql.sh")
cmd = '/tmp/run_postgresql.sh {} {} {}'.format(vm_ips[2], user, device)
log.info('Running command {}'.format(cmd))
ssh_client[1].run(cmd, timeout=constants.TIMEOUT * 3)
results_path = os.path.join(localpath, 'postgresql{}_{}.zip'.format(str(time.time()),
instancetype))
ssh_client[1].get_file('/tmp/postgresql.zip', results_path)
except Exception as e:
log.error(e)
raise
finally:
if connector:
connector.teardown()
if results_path:
upload_results(localpath=localpath, table_name='Perf_{}_PostgreSQL'.format(provider),
results_path=results_path, parser=PostgreSQLLogsReader,
test_case_name='{}_PostgreSQL_perf_tuned'.format(provider),
data_path=shortcut.data_path(sriov), host_type=shortcut.host_type(provider),
instance_size=instancetype, disk_setup='1 x SSD {}GB'.format(disk_size))
def test_zookeeper(provider, keyid, secret, token, imageid, subscription, tenant, projectid,
instancetype, user, localpath, region, zone, sriov, kernel):
"""
    Run ZooKeeper benchmark on a cluster of 5 servers and 1 load-generating client.
    :param provider: Service provider to be used e.g. azure, aws, gce.
:param keyid: user key for executing remote connection
:param secret: user secret for executing remote connection
:param token: GCE refresh token obtained with gcloud sdk
:param subscription: Azure specific subscription id
:param tenant: Azure specific tenant id
:param projectid: GCE specific project id
:param imageid: AWS OS AMI image id or
Azure image references offer and sku: e.g. 'UbuntuServer#16.04.0-LTS'.
    :param instancetype: AWS instance resource type e.g. 'd2.4xlarge' or
Azure hardware profile vm size e.g. 'Standard_DS14_v2'.
:param user: remote ssh user for the instance
:param localpath: localpath where the logs should be downloaded, and the
default path for other necessary tools
:param region: EC2 region to connect to
:param zone: EC2 zone where other resources should be available
:param sriov: Enable or disable SR-IOV
:param kernel: custom kernel name provided in localpath
"""
vm_count = 6
connector, vm_ips, device, ssh_client = setup_env(provider=provider, vm_count=vm_count,
test_type=None, disk_size=None,
raid=False, keyid=keyid, secret=secret,
token=token, subscriptionid=subscription,
tenantid=tenant, projectid=projectid,
imageid=imageid, instancetype=instancetype,
user=user, localpath=localpath,
region=region, zone=zone, sriov=sriov,
kernel=kernel)
results_path = None
try:
if all(client for client in ssh_client.values()):
for i in range(1, 7):
# enable key auth between instances
ssh_client[i].put_file(os.path.join(localpath, connector.key_name + '.pem'),
'/home/{}/.ssh/id_rsa'.format(user))
ssh_client[i].run('chmod 0600 /home/{0}/.ssh/id_rsa'.format(user))
current_path = os.path.dirname(os.path.realpath(__file__))
ssh_client[1].put_file(os.path.join(current_path, 'tests', 'run_zookeeper.sh'),
'/tmp/run_zookeeper.sh')
ssh_client[1].run('chmod +x /tmp/run_zookeeper.sh')
ssh_client[1].run("sed -i 's/\r//' /tmp/run_zookeeper.sh")
zk_servers = ' '.join([vm_ips[i] for i in range(2, 7)])
cmd = '/tmp/run_zookeeper.sh {} {}'.format(user, zk_servers)
log.info('Running command {}'.format(cmd))
ssh_client[1].run(cmd, timeout=constants.TIMEOUT)
results_path = os.path.join(localpath, 'zookeeper{}_{}.zip'.format(str(time.time()),
instancetype))
ssh_client[1].get_file('/tmp/zookeeper.zip', results_path)
except Exception as e:
log.error(e)
raise
finally:
if connector:
connector.teardown()
if results_path:
upload_results(localpath=localpath, table_name='Perf_{}_Zookeeper'.format(provider),
results_path=results_path, parser=ZookeeperLogsReader,
test_case_name='{}_Zookeeper_perf_tuned'.format(provider),
data_path=shortcut.data_path(sriov), host_type=shortcut.host_type(provider),
instance_size=instancetype,
cluster_setup='{} x servers'.format(vm_count - 1))
def test_terasort(provider, keyid, secret, token, imageid, subscription, tenant, projectid,
instancetype, user, localpath, region, zone, sriov, kernel):
"""
    Run Hadoop TeraSort benchmark on a cluster of servers using 1 master and
    5 slave instances in a VPC to leverage AWS Enhanced Networking.
    :param provider: Service provider to be used e.g. azure, aws, gce.
:param keyid: user key for executing remote connection
:param secret: user secret for executing remote connection
:param token: GCE refresh token obtained with gcloud sdk
:param subscription: Azure specific subscription id
:param tenant: Azure specific tenant id
:param projectid: GCE specific project id
:param imageid: AWS OS AMI image id or
Azure image references offer and sku: e.g. 'UbuntuServer#16.04.0-LTS'.
    :param instancetype: AWS instance resource type e.g. 'd2.4xlarge' or
Azure hardware profile vm size e.g. 'Standard_DS14_v2'.
:param user: remote ssh user for the instance
:param localpath: localpath where the logs should be downloaded, and the
default path for other necessary tools
:param region: EC2 region to connect to
:param zone: EC2 zone where other resources should be available
:param sriov: Enable or disable SR-IOV
:param kernel: custom kernel name provided in localpath
"""
vm_count = 6
connector, vm_ips, device, ssh_client = setup_env(provider=provider, vm_count=vm_count,
test_type=constants.CLUSTER_DISK,
disk_size=100, raid=False, keyid=keyid,
secret=secret, token=token,
subscriptionid=subscription, tenantid=tenant,
projectid=projectid, imageid=imageid,
instancetype=instancetype, user=user,
localpath=localpath, region=region,
zone=zone, sriov=sriov, kernel=kernel)
results_path = None
try:
if all(client for client in ssh_client.values()):
for i in range(1, 7):
# enable key auth between instances
ssh_client[i].put_file(os.path.join(localpath, connector.key_name + '.pem'),
'/home/{}/.ssh/id_rsa'.format(user))
ssh_client[i].run('chmod 0600 /home/{0}/.ssh/id_rsa'.format(user))
current_path = os.path.dirname(os.path.realpath(__file__))
ssh_client[1].put_file(os.path.join(current_path, 'tests', 'run_terasort.sh'),
'/tmp/run_terasort.sh')
ssh_client[1].run('chmod +x /tmp/run_terasort.sh')
ssh_client[1].run("sed -i 's/\r//' /tmp/run_terasort.sh")
slaves = ' '.join([vm_ips[i] for i in range(2, 7)])
cmd = '/tmp/run_terasort.sh {} {} {}'.format(user, device, slaves)
log.info('Running command {}'.format(cmd))
ssh_client[1].run(cmd, timeout=constants.TIMEOUT)
results_path = os.path.join(localpath, 'terasort{}_{}.zip'.format(str(time.time()),
instancetype))
ssh_client[1].get_file('/tmp/terasort.zip', results_path)
except Exception as e:
log.error(e)
raise
finally:
if connector:
connector.teardown()
if results_path:
upload_results(localpath=localpath, table_name='Perf_{}_Terasort'.format(provider),
results_path=results_path, parser=TerasortLogsReader,
test_case_name='{}_Terasort_perf_tuned'.format(provider),
data_path=shortcut.data_path(sriov), host_type=shortcut.host_type(provider),
instance_size=instancetype,
cluster_setup='1 master + {} slaves'.format(vm_count - 1))
def test_storage(provider, keyid, secret, token, imageid, subscription, tenant, projectid,
instancetype, user, localpath, region, zone, sriov, kernel):
"""
Run FIO storage profile.
    :param provider: Service provider to be used e.g. azure, aws, gce.
:param keyid: user key for executing remote connection
:param secret: user secret for executing remote connection
:param token: GCE refresh token obtained with gcloud sdk
:param subscription: Azure specific subscription id
:param tenant: Azure specific tenant id
:param projectid: GCE specific project id
:param imageid: AWS OS AMI image id or
Azure image references offer and sku: e.g. 'UbuntuServer#16.04.0-LTS'.
    :param instancetype: AWS instance resource type e.g. 'd2.4xlarge' or
Azure hardware profile vm size e.g. 'Standard_DS14_v2'.
:param user: remote ssh user for the instance
:param localpath: localpath where the logs should be downloaded, and the
default path for other necessary tools
:param region: EC2 region to connect to
:param zone: EC2 zone where other resources should be available
:param sriov: Enable or disable SR-IOV
:param kernel: custom kernel name provided in localpath
"""
disk_size = 0
raid = 12
if provider == constants.AWS:
disk_size = 100
elif provider == constants.AZURE:
disk_size = 513
elif provider == constants.GCE:
disk_size = 167
connector, vm_ips, device, ssh_client = setup_env(provider=provider, vm_count=1,
test_type=constants.VM_DISK,
disk_size=disk_size, raid=raid, keyid=keyid,
secret=secret, token=token,
subscriptionid=subscription, tenantid=tenant,
projectid=projectid, imageid=imageid,
instancetype=instancetype, user=user,
localpath=localpath, region=region,
zone=zone, sriov=sriov, kernel=kernel)
results_path = None
try:
if all(client for client in ssh_client.values()):
current_path = os.path.dirname(os.path.realpath(__file__))
ssh_client[1].put_file(os.path.join(current_path, 'tests', 'raid.sh'), '/tmp/raid.sh')
ssh_client[1].run('chmod +x /tmp/raid.sh')
ssh_client[1].run("sed -i 's/\r//' /tmp/raid.sh")
ssh_client[1].run('/tmp/raid.sh 0 {} {}'.format(raid, ' '.join(device)))
ssh_client[1].put_file(os.path.join(current_path, 'tests', 'run_storage.sh'),
'/tmp/run_storage.sh')
ssh_client[1].run('chmod +x /tmp/run_storage.sh')
ssh_client[1].run("sed -i 's/\r//' /tmp/run_storage.sh")
cmd = '/tmp/run_storage.sh {}'.format(constants.RAID_DEV)
log.info('Running command {}'.format(cmd))
ssh_client[1].run(cmd, timeout=constants.TIMEOUT)
results_path = os.path.join(localpath, 'storage{}_{}.zip'.format(str(time.time()),
instancetype))
ssh_client[1].get_file('/tmp/storage.zip', results_path)
except Exception as e:
log.error(e)
raise
finally:
if connector:
connector.teardown()
if results_path:
upload_results(localpath=localpath, table_name='Perf_{}_Storage'.format(provider),
results_path=results_path, parser=StorageLogsReader,
test_case_name='{}_Storage_perf_tuned'.format(provider),
provider=provider, region=region, data_path=shortcut.data_path(sriov),
host_type=shortcut.host_type(provider), instance_size=instancetype,
disk_setup='RAID0:{}x{}G'.format(raid, disk_size))
def test_network_tcp(provider, keyid, secret, token, imageid, subscription, tenant, projectid,
instancetype, user, localpath, region, zone, sriov, kernel):
"""
Run NTTTCP network TCP profile.
    :param provider: Service provider to be used e.g. azure, aws, gce.
:param keyid: user key for executing remote connection
:param secret: user secret for executing remote connection
:param token: GCE refresh token obtained with gcloud sdk
:param subscription: Azure specific subscription id
:param tenant: Azure specific tenant id
:param projectid: GCE specific project id
:param imageid: AWS OS AMI image id or
Azure image references offer and sku: e.g. 'UbuntuServer#16.04.0-LTS'.
    :param instancetype: AWS instance resource type e.g. 'd2.4xlarge' or
Azure hardware profile vm size e.g. 'Standard_DS14_v2'.
:param user: remote ssh user for the instance
:param localpath: localpath where the logs should be downloaded, and the
default path for other necessary tools
:param region: EC2 region to connect to
:param zone: EC2 zone where other resources should be available
:param sriov: Enable or disable SR-IOV
:param kernel: custom kernel name provided in localpath
"""
connector, vm_ips, device, ssh_client = setup_env(provider=provider, vm_count=2,
test_type=None, disk_size=None, raid=False,
keyid=keyid, secret=secret, token=token,
subscriptionid=subscription, tenantid=tenant,
projectid=projectid, imageid=imageid,
instancetype=instancetype, user=user,
localpath=localpath, region=region,
zone=zone, sriov=sriov, kernel=kernel)
results_path = None
try:
if all(client for client in ssh_client.values()):
# enable key auth between instances
ssh_client[1].put_file(os.path.join(localpath, connector.key_name + '.pem'),
'/home/{}/.ssh/id_rsa'.format(user))
ssh_client[1].run('chmod 0600 /home/{0}/.ssh/id_rsa'.format(user))
current_path = os.path.dirname(os.path.realpath(__file__))
ssh_client[1].put_file(os.path.join(current_path, 'tests', 'run_network.sh'),
'/tmp/run_network.sh')
ssh_client[1].run('chmod +x /tmp/run_network.sh')
ssh_client[1].run("sed -i 's/\r//' /tmp/run_network.sh")
cmd = '/tmp/run_network.sh {} {} {}'.format(vm_ips[2], user, 'TCP')
log.info('Running command {}'.format(cmd))
ssh_client[1].run(cmd, timeout=constants.TIMEOUT)
results_path = os.path.join(localpath, 'network{}_{}.zip'.format(str(time.time()),
instancetype))
ssh_client[1].get_file('/tmp/network.zip', results_path)
except Exception as e:
log.error(e)
raise
finally:
if connector:
connector.teardown()
if results_path:
upload_results(localpath=localpath, table_name='Perf_{}_Network_TCP'.format(provider),
results_path=results_path, parser=TCPLogsReader,
test_case_name='{}_Network_TCP_perf_tuned'.format(provider),
provider=provider, region=region, data_path=shortcut.data_path(sriov),
host_type=shortcut.host_type(provider), instance_size=instancetype)
def test_network_udp(provider, keyid, secret, token, imageid, subscription, tenant, projectid,
instancetype, user, localpath, region, zone, sriov, kernel):
"""
Run iperf3 UDP network profile.
    :param provider: Service provider to be used e.g. azure, aws, gce.
:param keyid: user key for executing remote connection
:param secret: user secret for executing remote connection
:param token: GCE refresh token obtained with gcloud sdk
:param subscription: Azure specific subscription id
:param tenant: Azure specific tenant id
:param projectid: GCE specific project id
:param imageid: AWS OS AMI image id or
Azure image references offer and sku: e.g. 'UbuntuServer#16.04.0-LTS'.
    :param instancetype: AWS instance resource type e.g. 'd2.4xlarge' or
Azure hardware profile vm size e.g. 'Standard_DS14_v2'.
:param user: remote ssh user for the instance
:param localpath: localpath where the logs should be downloaded, and the
default path for other necessary tools
:param region: EC2 region to connect to
:param zone: EC2 zone where other resources should be available
:param sriov: Enable or disable SR-IOV
:param kernel: custom kernel name provided in localpath
"""
connector, vm_ips, device, ssh_client = setup_env(provider=provider, vm_count=2,
test_type=None, disk_size=None, raid=False,
keyid=keyid, secret=secret, token=token,
subscriptionid=subscription, tenantid=tenant,
projectid=projectid, imageid=imageid,
instancetype=instancetype, user=user,
localpath=localpath, region=region,
zone=zone, sriov=sriov, kernel=kernel)
results_path = None
try:
if all(client for client in ssh_client.values()):
# enable key auth between instances
ssh_client[1].put_file(os.path.join(localpath, connector.key_name + '.pem'),
'/home/{}/.ssh/id_rsa'.format(user))
ssh_client[1].run('chmod 0600 /home/{0}/.ssh/id_rsa'.format(user))
current_path = os.path.dirname(os.path.realpath(__file__))
ssh_client[1].put_file(os.path.join(current_path, 'tests', 'run_network.sh'),
'/tmp/run_network.sh')
ssh_client[1].run('chmod +x /tmp/run_network.sh')
ssh_client[1].run("sed -i 's/\r//' /tmp/run_network.sh")
cmd = '/tmp/run_network.sh {} {} {}'.format(vm_ips[2], user, 'UDP')
log.info('Running command {}'.format(cmd))
ssh_client[1].run(cmd, timeout=constants.TIMEOUT)
results_path = os.path.join(localpath, 'network{}_{}.zip'.format(str(time.time()),
instancetype))
ssh_client[1].get_file('/tmp/network.zip', results_path)
except Exception as e:
log.error(e)
raise
finally:
if connector:
connector.teardown()
if results_path:
upload_results(localpath=localpath, table_name='Perf_{}_Network_UDP'.format(provider),
results_path=results_path, parser=UDPLogsReader,
test_case_name='{}_Network_UDP_perf_tuned'.format(provider),
provider=provider, region=region, data_path=shortcut.data_path(sriov),
host_type=shortcut.host_type(provider), instance_size=instancetype)
def test_network_latency(provider, keyid, secret, token, imageid, subscription, tenant, projectid,
instancetype, user, localpath, region, zone, sriov, kernel):
"""
    Run lagscope network latency profile.
    :param provider: Service provider to be used e.g. azure, aws, gce.
:param keyid: user key for executing remote connection
:param secret: user secret for executing remote connection
:param token: GCE refresh token obtained with gcloud sdk
:param subscription: Azure specific subscription id
:param tenant: Azure specific tenant id
:param projectid: GCE specific project id
:param imageid: AWS OS AMI image id or
Azure image references offer and sku: e.g. 'UbuntuServer#16.04.0-LTS'.
    :param instancetype: AWS instance resource type e.g. 'd2.4xlarge' or
Azure hardware profile vm size e.g. 'Standard_DS14_v2'.
:param user: remote ssh user for the instance
:param localpath: localpath where the logs should be downloaded, and the
default path for other necessary tools
:param region: EC2 region to connect to
:param zone: EC2 zone where other resources should be available
:param sriov: Enable or disable SR-IOV
:param kernel: custom kernel name provided in localpath
"""
connector, vm_ips, device, ssh_client = setup_env(provider=provider, vm_count=2,
test_type=None, disk_size=None, raid=False,
keyid=keyid, secret=secret, token=token,
subscriptionid=subscription, tenantid=tenant,
projectid=projectid, imageid=imageid,
instancetype=instancetype, user=user,
localpath=localpath, region=region,
zone=zone, sriov=sriov, kernel=kernel)
results_path = None
try:
if all(client for client in ssh_client.values()):
# enable key auth between instances
ssh_client[1].put_file(os.path.join(localpath, connector.key_name + '.pem'),
'/home/{}/.ssh/id_rsa'.format(user))
ssh_client[1].run('chmod 0600 /home/{0}/.ssh/id_rsa'.format(user))
current_path = os.path.dirname(os.path.realpath(__file__))
ssh_client[1].put_file(os.path.join(current_path, 'tests', 'run_network.sh'),
'/tmp/run_network.sh')
ssh_client[1].run('chmod +x /tmp/run_network.sh')
ssh_client[1].run("sed -i 's/\r//' /tmp/run_network.sh")
cmd = '/tmp/run_network.sh {} {} {}'.format(vm_ips[2], user, 'latency')
log.info('Running command {}'.format(cmd))
ssh_client[1].run(cmd, timeout=constants.TIMEOUT)
results_path = os.path.join(localpath, 'network{}_{}.zip'.format(str(time.time()),
instancetype))
ssh_client[1].get_file('/tmp/network.zip', results_path)
except Exception as e:
log.error(e)
raise
finally:
if connector:
connector.teardown()
if results_path:
upload_results(localpath=localpath, table_name='Perf_{}_Network_Latency'.format(provider),
results_path=results_path, parser=LatencyLogsReader,
test_case_name='{}_Network_Latency_perf_tuned'.format(provider),
provider=provider, region=region, data_path=shortcut.data_path(sriov),
host_type=shortcut.host_type(provider), instance_size=instancetype)
def test_network_single_tcp(provider, keyid, secret, token, imageid, subscription, tenant,
projectid, instancetype, user, localpath, region, zone, sriov, kernel):
"""
Run variable TCP buffer network profile for a single connection.
    :param provider: Service provider to be used, e.g. azure, aws, gce.
:param keyid: user key for executing remote connection
:param secret: user secret for executing remote connection
:param token: GCE refresh token obtained with gcloud sdk
:param subscription: Azure specific subscription id
:param tenant: Azure specific tenant id
:param projectid: GCE specific project id
:param imageid: AWS OS AMI image id or
Azure image references offer and sku: e.g. 'UbuntuServer#16.04.0-LTS'.
    :param instancetype: AWS instance resource type e.g. 'd2.4xlarge' or
Azure hardware profile vm size e.g. 'Standard_DS14_v2'.
:param user: remote ssh user for the instance
:param localpath: localpath where the logs should be downloaded, and the
default path for other necessary tools
:param region: EC2 region to connect to
:param zone: EC2 zone where other resources should be available
:param sriov: Enable or disable SR-IOV
:param kernel: custom kernel name provided in localpath
"""
connector, vm_ips, device, ssh_client = setup_env(provider=provider, vm_count=2,
test_type=None, disk_size=None, raid=False,
keyid=keyid, secret=secret, token=token,
subscriptionid=subscription, tenantid=tenant,
projectid=projectid, imageid=imageid,
instancetype=instancetype, user=user,
localpath=localpath, region=region,
zone=zone, sriov=sriov, kernel=kernel)
results_path = None
try:
if all(client for client in ssh_client.values()):
# enable key auth between instances
ssh_client[1].put_file(os.path.join(localpath, connector.key_name + '.pem'),
'/home/{}/.ssh/id_rsa'.format(user))
ssh_client[1].run('chmod 0600 /home/{0}/.ssh/id_rsa'.format(user))
current_path = os.path.dirname(os.path.realpath(__file__))
ssh_client[1].put_file(os.path.join(current_path, 'tests', 'run_network.sh'),
'/tmp/run_network.sh')
ssh_client[1].run('chmod +x /tmp/run_network.sh')
ssh_client[1].run("sed -i 's/\r//' /tmp/run_network.sh")
cmd = '/tmp/run_network.sh {} {} {}'.format(vm_ips[2], user, 'single_tcp')
log.info('Running command {}'.format(cmd))
ssh_client[1].run(cmd, timeout=constants.TIMEOUT)
results_path = os.path.join(localpath, 'network{}_{}.zip'.format(str(time.time()),
instancetype))
ssh_client[1].get_file('/tmp/network.zip', results_path)
except Exception as e:
log.error(e)
raise
finally:
if connector:
connector.teardown()
if results_path:
upload_results(localpath=localpath,
table_name='Perf_{}_Network_Single_TCP'.format(provider),
results_path=results_path, parser=SingleTCPLogsReader,
test_case_name='{}_Network_Single_TCP_perf_tuned'.format(provider),
provider=provider, region=region, data_path=shortcut.data_path(sriov),
host_type=shortcut.host_type(provider), instance_size=instancetype)
def test_sql_server_inmemdb(provider, keyid, secret, token, imageid, subscription, tenant,
projectid, instancetype, user, localpath, region, zone, sriov, kernel):
"""
Run SQLServer Benchcraft profiling. The test assumes the existence of a *.vm config file and an
Azure windows image prepared with all benchcraft prerequisites (sql scripts, ps scripts and
db flat files). The setup creates a windows and a linux VM for testing specific InMemDB
performance.
    :param provider: Service provider to be used, e.g. azure, aws, gce.
:param keyid: user key for executing remote connection
:param secret: user secret for executing remote connection
:param token: GCE refresh token obtained with gcloud sdk
:param subscription: Azure specific subscription id
:param tenant: Azure specific tenant id
:param projectid: GCE specific project id
:param imageid: AWS OS AMI image id or
Azure image references offer and sku: e.g. 'UbuntuServer#16.04.0-LTS'.
    :param instancetype: AWS instance resource type e.g. 'd2.4xlarge' or
Azure hardware profile vm size e.g. 'Standard_DS14_v2'.
:param user: remote ssh user for the instance
:param localpath: localpath where the logs should be downloaded, and the
default path for other necessary tools
:param region: EC2 region to connect to
:param zone: EC2 zone where other resources should be available
:param sriov: Enable or disable SR-IOV
:param kernel: custom kernel name provided in localpath
"""
disk_size = 0
if provider == constants.AWS:
disk_size = 200
elif provider == constants.AZURE:
disk_size = 513
connector, vm_ips, device, ssh_client = setup_env(provider=provider, vm_count=1,
test_type=constants.VM_DISK,
disk_size=disk_size, raid=False, keyid=keyid,
secret=secret, token=token,
subscriptionid=subscription, tenantid=tenant,
projectid=projectid, imageid=imageid,
instancetype=instancetype, user=user,
localpath=localpath, region=region,
zone=zone, sriov=sriov, kernel=kernel)
    results_path = None
    # report is referenced in the finally-block upload; initialize it so an early
    # failure does not raise NameError and mask the original exception
    report = None
try:
# TODO add Windows VM support for the other cloud providers
win_user, password, win_vm = connector.create_vm(config_file=localpath)
log.info(win_vm)
if all(client for client in ssh_client.values()):
current_path = os.path.dirname(os.path.realpath(__file__))
ssh_client[1].put_file(os.path.join(current_path, 'tests', 'run_sqlserver.sh'),
'/tmp/run_sqlserver.sh')
ssh_client[1].run('chmod +x /tmp/run_sqlserver.sh')
ssh_client[1].run("sed -i 's/\r//' /tmp/run_sqlserver.sh")
cmd = '/tmp/run_sqlserver.sh {} {}'.format(password, constants.DEVICE_AZURE)
log.info('Running command {}'.format(cmd))
ssh_client[1].run(cmd, timeout=constants.TIMEOUT)
results_path = os.path.join(localpath, 'sqlserver{}_{}.zip'.format(str(time.time()),
instancetype))
ssh_client[1].get_file('/tmp/sqlserver.zip', results_path)
log.info(ssh_client[1].run('sudo cat /var/opt/mssql/mssql.conf',
timeout=constants.TIMEOUT))
sqlserver_ip = vm_ips[1]
host = win_vm.name + connector.dns_suffix
winrm_client = WinRMClient(host=host, user=win_user, password=password)
settings = []
sql_sps = []
settings.append('Set-Content -Value ($(Get-Content {ps_path}Settings.ps1 -raw) -replace \'\$SQLServer = \\\".+\\\"\',\'$SQLServer = \\\"{ip}\\\"\') -Path {ps_path}Settings.ps1'.format(
ps_path=constants.PS_PATH, ip=sqlserver_ip))
settings.append('Set-Content -Value ($(Get-Content {ps_path}Settings.ps1 -raw) -replace \'\$SQLPwd = \\\".+\\\"\',\'$SQLPwd = \\\"{passwd}\\\"\') -Path {ps_path}Settings.ps1'.format(
ps_path=constants.PS_PATH, passwd=password))
settings.append('Set-Content -Value ($(Get-Content {bc_path}{bc_profile} -raw) -replace \'(?:sqlserver_ip)\',\'{ip}\') -Path {bc_path}{bc_profile}'.format(
bc_path=constants.BC_PATH, bc_profile=constants.BC_PROFILE, ip=sqlserver_ip))
settings.append('Set-Content -Value ($(Get-Content {bc_path}{bc_profile} -raw) -replace \'(?:sqlserver_pass)\',\'{passwd}\') -Path {bc_path}{bc_profile}'.format(
bc_path=constants.BC_PATH, bc_profile=constants.BC_PROFILE, passwd=password))
for setting in settings:
log.info(setting)
winrm_client.run(cmd=setting, ps=True)
log.info('Creating database.')
winrm_client.run(cmd=shortcut.run_sql('{}Create_Database_InMem.sql'.format(
constants.DB_SQL_PATH), sqlserver_ip, password=password), ps=True)
log.info('Creating InMemDb tables.')
winrm_client.run(cmd=shortcut.run_sql('{}Create_Tables_InMem.sql'.format(
constants.DB_SQL_PATH), sqlserver_ip, password=password, db='InMemDb'), ps=True)
log.info('Loading data into tables.')
winrm_client.run(cmd='{}Load_HK_DB_BCP.ps1'.format(constants.PS_PATH), ps=True)
sql_sps.append('{}InMem_FulfillOrders.sql'.format(constants.SP_SQL_PATH))
sql_sps.append('{}InMem_GetOrdersByCustomerID.sql'.format(constants.SP_SQL_PATH))
sql_sps.append('{}InMem_GetProductsByType.sql'.format(constants.SP_SQL_PATH))
sql_sps.append('{}InMem_GetProductsPriceByPK.sql'.format(constants.SP_SQL_PATH))
sql_sps.append('{}InMem_InsertOrder.sql'.format(constants.SP_SQL_PATH))
sql_sps.append('{}InMem_ProductSelectionCriteria.sql'.format(constants.SP_SQL_PATH))
sql_sps.append('{}optimize_memory.sql'.format(constants.DB_SCRIPTS_PATH))
for sql_sp in sql_sps:
log.info(sql_sp)
winrm_client.run(cmd=shortcut.run_sql(sql_sp, sqlserver_ip, password=password,
db='InMemDb'), ps=True)
# start server collect
ssh_client[1].run('mkdir /tmp/sqlserver_stats', timeout=constants.TIMEOUT)
ssh_client[1].run('nohup sar -n DEV 1 > /tmp/sqlserver_stats/sar.netio.log 2>&1 &',
timeout=constants.TIMEOUT)
ssh_client[1].run('nohup iostat -x -d 1 > /tmp/sqlserver_stats/iostat.diskio.log 2>&1 &',
timeout=constants.TIMEOUT)
ssh_client[1].run('nohup vmstat 1 > /tmp/sqlserver_stats/vmstat.memory.cpu.log 2>&1 &',
timeout=constants.TIMEOUT)
cmd = '{bc_path}start.ps1'.format(bc_path=constants.BC_PATH)
log.info(cmd)
winrm_client.run(cmd=cmd, ps=True)
# collect server stats
ssh_client[1].run('pkill -f sar; pkill -f vmstat; pkill -f iostat',
timeout=constants.TIMEOUT)
ssh_client[1].run('cd /tmp; zip -r sqlserver_stats.zip . -i sqlserver_stats/*',
timeout=constants.TIMEOUT)
ssh_client[1].get_file('/tmp/sqlserver_stats.zip', os.path.join(
localpath, 'sqlserver_stats{}_{}.zip'.format(str(time.time()), instancetype)))
cmd = 'type {}Report1.log'.format(constants.BC_PATH)
report = winrm_client.run(cmd=cmd, ps=True)
except Exception as e:
log.error(e)
raise
finally:
if connector:
connector.teardown()
if results_path:
upload_results(localpath=localpath, table_name='Perf_{}_SQLServer'.format(provider),
results_path=results_path, parser=SQLServerLogsReader,
test_case_name='{}_SQLServer_perf_tuned'.format(provider),
provider=provider, region=region, data_path=shortcut.data_path(sriov),
host_type=shortcut.host_type(provider), instance_size=instancetype,
disk_setup='1 x SSD {}GB'.format(disk_size), report=report)
def test_network_custom(provider, keyid, secret, token, imageid, subscription, tenant,
projectid, instancetype, user, localpath, region, zone, sriov, kernel):
"""
Run variable TCP custom test.
    :param provider: Service provider to be used, e.g. azure, aws, gce.
:param keyid: user key for executing remote connection
:param secret: user secret for executing remote connection
:param token: GCE refresh token obtained with gcloud sdk
:param subscription: Azure specific subscription id
:param tenant: Azure specific tenant id
:param projectid: GCE specific project id
:param imageid: AWS OS AMI image id or
Azure image references offer and sku: e.g. 'UbuntuServer#16.04.0-LTS'.
    :param instancetype: AWS instance resource type e.g. 'd2.4xlarge' or
Azure hardware profile vm size e.g. 'Standard_DS14_v2'.
:param user: remote ssh user for the instance
:param localpath: localpath where the logs should be downloaded, and the
default path for other necessary tools
:param region: EC2 region to connect to
:param zone: EC2 zone where other resources should be available
:param sriov: Enable or disable SR-IOV
:param kernel: custom kernel name provided in localpath
"""
connector, vm_ips, device, ssh_client = setup_env(provider=provider, vm_count=2,
test_type=None, disk_size=None, raid=False,
keyid=keyid, secret=secret, token=token,
subscriptionid=subscription, tenantid=tenant,
projectid=projectid, imageid=imageid,
instancetype=instancetype, user=user,
localpath=localpath, region=region,
zone=zone, sriov=sriov, kernel=kernel)
try:
if all(client for client in ssh_client.values()):
# enable key auth between instances
ssh_client[1].put_file(os.path.join(localpath, connector.key_name + '.pem'),
'/home/{}/.ssh/id_rsa'.format(user))
ssh_client[1].run('chmod 0600 /home/{0}/.ssh/id_rsa'.format(user))
current_path = os.path.dirname(os.path.realpath(__file__))
ssh_client[1].put_file(os.path.join(current_path, 'tests', 'run_network.sh'),
'/tmp/run_network.sh')
ssh_client[1].run('chmod +x /tmp/run_network.sh')
ssh_client[1].run("sed -i 's/\r//' /tmp/run_network.sh")
cmd = '/tmp/run_network.sh {} {} {}'.format(vm_ips[2], user, 'custom')
log.info('Running command {}'.format(cmd))
ssh_client[1].run(cmd, timeout=constants.TIMEOUT)
results_path = os.path.join(localpath, 'network{}_{}.zip'.format(str(time.time()),
instancetype))
ssh_client[1].get_file('/tmp/network.zip', results_path)
except Exception as e:
log.error(e)
raise
finally:
if connector:
connector.teardown()
def test_scheduler(provider, keyid, secret, token, imageid, subscription, tenant,
projectid, instancetype, user, localpath, region, zone, sriov, kernel):
"""
Run kernel scheduler tests.
    :param provider: Service provider to be used, e.g. azure, aws, gce.
:param keyid: user key for executing remote connection
:param secret: user secret for executing remote connection
:param token: GCE refresh token obtained with gcloud sdk
:param subscription: Azure specific subscription id
:param tenant: Azure specific tenant id
:param projectid: GCE specific project id
:param imageid: AWS OS AMI image id or
Azure image references offer and sku: e.g. 'UbuntuServer#16.04.0-LTS'.
    :param instancetype: AWS instance resource type e.g. 'd2.4xlarge' or
Azure hardware profile vm size e.g. 'Standard_DS14_v2'.
:param user: remote ssh user for the instance
:param localpath: localpath where the logs should be downloaded, and the
default path for other necessary tools
:param region: EC2 region to connect to
:param zone: EC2 zone where other resources should be available
:param sriov: Enable or disable SR-IOV
:param kernel: custom kernel name provided in localpath
"""
connector, vm_ips, device, ssh_client = setup_env(provider=provider, vm_count=1,
test_type=None, disk_size=None, raid=False,
keyid=keyid, secret=secret, token=token,
subscriptionid=subscription, tenantid=tenant,
projectid=projectid, imageid=imageid,
instancetype=instancetype, user=user,
localpath=localpath, region=region,
zone=zone, sriov=sriov, kernel=kernel)
results_path = None
try:
if all(client for client in ssh_client.values()):
# enable key auth between instances
ssh_client[1].put_file(os.path.join(localpath, connector.key_name + '.pem'),
'/home/{}/.ssh/id_rsa'.format(user))
ssh_client[1].run('chmod 0600 /home/{0}/.ssh/id_rsa'.format(user))
current_path = os.path.dirname(os.path.realpath(__file__))
ssh_client[1].put_file(os.path.join(current_path, 'tests', 'run_scheduler.sh'),
'/tmp/run_scheduler.sh')
ssh_client[1].run('chmod +x /tmp/run_scheduler.sh')
ssh_client[1].run("sed -i 's/\r//' /tmp/run_scheduler.sh")
cmd = '/tmp/run_scheduler.sh {}'.format('all')
log.info('Running command {}'.format(cmd))
ssh_client[1].run(cmd, timeout=constants.TIMEOUT)
results_path = os.path.join(localpath, 'scheduler{}_{}.zip'.format(str(time.time()),
instancetype))
ssh_client[1].get_file('/tmp/scheduler.zip', results_path)
except Exception as e:
log.error(e)
raise
finally:
if connector:
connector.teardown()
if results_path:
upload_results(localpath=localpath, table_name='Perf_{}_Scheduler'.format(provider),
results_path=results_path, parser=SchedulerLogsReader,
test_case_name='{}_Scheduler_perf_tuned'.format(provider),
host_type=shortcut.host_type(provider), instance_size=instancetype)
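Every test in this module repeats the same remote-execution flow: copy a runner script to the first instance, normalize its line endings, run it against the second instance, then pull back a zip of results before teardown and upload. The sketch below restates that shared flow for clarity; run_remote_profile, its parameter names and the duck-typed ssh client are illustrative assumptions, not part of the original module.

import os
import time


def run_remote_profile(ssh, localpath, script_name, mode, peer_ip, user, prefix, instancetype, timeout=7200):
    """Hypothetical helper mirroring the put/chmod/sed/run/get pattern used by the tests above."""
    current_path = os.path.dirname(os.path.realpath(__file__))
    remote_script = '/tmp/{}'.format(script_name)
    # copy the runner script shipped next to the test module onto the client VM
    ssh.put_file(os.path.join(current_path, 'tests', script_name), remote_script)
    ssh.run('chmod +x {}'.format(remote_script))
    # strip Windows line endings so bash can execute the script
    ssh.run("sed -i 's/\\r//' {}".format(remote_script))
    # run the profile against the peer instance, e.g. mode='UDP', 'latency' or 'single_tcp'
    ssh.run('{} {} {} {}'.format(remote_script, peer_ip, user, mode), timeout=timeout)
    # results are archived remotely as /tmp/<prefix>.zip and copied locally with a timestamp
    results_path = os.path.join(localpath, '{}{}_{}.zip'.format(prefix, time.time(), instancetype))
    ssh.get_file('/tmp/{}.zip'.format(prefix), results_path)
    return results_path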
| 58.022036
| 192
| 0.570061
| 12,592
| 110,590
| 4.869362
| 0.039072
| 0.034934
| 0.026584
| 0.020566
| 0.902031
| 0.896094
| 0.887874
| 0.875707
| 0.863688
| 0.854457
| 0
| 0.012394
| 0.335374
| 110,590
| 1,905
| 193
| 58.052493
| 0.821812
| 0.257998
| 0
| 0.776263
| 0
| 0.004812
| 0.108266
| 0.027849
| 0
| 0
| 0
| 0.00105
| 0
| 1
| 0.019246
| false
| 0.008019
| 0.009623
| 0
| 0.029671
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3289da8d6255172735e1778cab8bd3ec700d62d2
| 12,395
|
py
|
Python
|
hallo/modules/channel_control/channel_control.py
|
SpangleLabs/Hallo
|
17145d8f76552ecd4cbc5caef8924bd2cf0cbf24
|
[
"MIT"
] | 1
|
2022-01-27T13:25:01.000Z
|
2022-01-27T13:25:01.000Z
|
hallo/modules/channel_control/channel_control.py
|
joshcoales/Hallo
|
17145d8f76552ecd4cbc5caef8924bd2cf0cbf24
|
[
"MIT"
] | 75
|
2015-09-26T18:07:18.000Z
|
2022-01-04T07:15:11.000Z
|
hallo/modules/channel_control/channel_control.py
|
SpangleLabs/Hallo
|
17145d8f76552ecd4cbc5caef8924bd2cf0cbf24
|
[
"MIT"
] | 1
|
2021-04-10T12:02:47.000Z
|
2021-04-10T12:02:47.000Z
|
from hallo.function import Function
from hallo.inc.commons import Commons
def hallo_has_op(channel):
"""
Checks whether hallo has op in a given channel.
:param channel: channel to check op status for
:type channel: destination.Channel
:return: whether hallo has op
:rtype: bool
"""
server = channel.server
hallo_user = server.get_user_by_address(
server.get_nick().lower(), server.get_nick()
)
hallo_membership = channel.get_membership_by_user(hallo_user)
return hallo_membership.is_op
class ChannelCaps(Function):
"""
Set caps lock for a channel.
"""
def __init__(self):
"""
Constructor
"""
super().__init__()
# Name for use in help listing
self.help_name = "caps lock"
# Names which can be used to address the function
self.names = {
"caps lock",
"channel caps",
"channel caps lock",
"chan caps",
"chan caps lock",
"channelcapslock",
}
# Help documentation, if it's just a single line, can be set here
self.help_docs = "Sets caps lock for channel on or off."
def run(self, event):
# Get server object
server_obj = event.server
# If no arguments given, toggle caps lock in current destination
line_clean = event.command_args.strip()
if line_clean == "":
event.channel.use_caps_lock = not event.channel.use_caps_lock
return event.create_response("Caps lock toggled.")
# If line has 1 argument,
line_split = line_clean.split()
if len(line_split) == 1:
# Check if a boolean was specified
input_bool = Commons.string_to_bool(line_split[0])
if input_bool is not None:
event.channel.use_caps_lock = input_bool
return event.create_response(
"Caps lock set {}.".format({False: "off", True: "on"}[input_bool])
)
# Check if a channel was specified
target_channel = server_obj.get_channel_by_name(line_split[0])
if target_channel.in_channel:
target_channel.use_caps_lock = not target_channel.use_caps_lock
return event.create_response(
"Caps lock toggled in {}.".format(target_channel.name)
)
# Otherwise input unknown
return event.create_response(
"Error, I don't understand your input, "
+ "please specify a channel and whether to turn caps lock on or off."
)
# Otherwise line has 2 or more arguments.
# Check if first argument is boolean
input_bool = Commons.string_to_bool(line_split[0])
target_channel_name = line_split[1]
if input_bool is None:
input_bool = Commons.string_to_bool(line_split[1])
target_channel_name = line_split[0]
if input_bool is None:
return event.create_response(
"Error, I don't understand your input, please specify a channel and "
+ "whether to turn caps lock on or off."
)
target_channel = server_obj.get_channel_by_name(target_channel_name)
if target_channel is None or not target_channel.in_channel:
return event.create_response("Error, I'm not in that channel.")
target_channel.use_caps_lock = input_bool
return event.create_response(
"Caps lock set {} in {}.".format(
{False: "off", True: "on"}[input_bool], target_channel.name
)
)
class ChannelLogging(Function):
"""
Set logging for a channel.
"""
def __init__(self):
"""
Constructor
"""
super().__init__()
# Name for use in help listing
self.help_name = "logging"
# Names which can be used to address the function
self.names = {
"logging",
"channel logging",
"channel log",
"chan logging",
"chan log",
}
# Help documentation, if it's just a single line, can be set here
self.help_docs = "Sets or toggles logging for channel."
def run(self, event):
# Get server object
server_obj = event.server
# If no arguments given, toggle logging in current destination
line_clean = event.command_args.strip()
if line_clean == "":
event.channel.logging = not event.channel.logging
return event.create_response("Logging toggled.")
# If line has 1 argument,
line_split = line_clean.split()
if len(line_split) == 1:
# Check if a boolean was specified
input_bool = Commons.string_to_bool(line_split[0])
if input_bool is not None:
event.channel.logging = input_bool
return event.create_response(
"Logging set {}.".format({False: "off", True: "on"}[input_bool])
)
# Check if a channel was specified
target_channel = server_obj.get_channel_by_name(line_split[0])
if target_channel.in_channel:
target_channel.logging = not target_channel.logging
return event.create_response(
"Logging toggled in {}.".format(target_channel.name)
)
# Otherwise input unknown
return event.create_response(
"Error, I don't understand your input, please specify a channel and "
+ "whether to turn logging on or off."
)
# Otherwise line has 2 or more arguments.
# Check if first argument is boolean
input_bool = Commons.string_to_bool(line_split[0])
target_channel_name = line_split[1]
if input_bool is None:
input_bool = Commons.string_to_bool(line_split[1])
target_channel_name = line_split[0]
if input_bool is None:
return event.create_response(
"Error, I don't understand your input, please specify a channel and "
+ "whether to turn logging on or off."
)
target_channel = server_obj.get_channel_by_name(target_channel_name)
if not target_channel.in_channel:
return event.create_response("Error, I'm not in that channel.")
target_channel.logging = input_bool
return event.create_response(
"Logging set {} in {}.".format(
{False: "off", True: "on"}[input_bool], target_channel.name
)
)
class ChannelPassiveFunctions(Function):
"""
Set whether passive functions are enabled in a channel.
"""
def __init__(self):
"""
Constructor
"""
super().__init__()
# Name for use in help listing
self.help_name = "passive"
# Names which can be used to address the function
self.names = {"passive"}
# Help documentation, if it's just a single line, can be set here
self.help_docs = "Sets or toggles logging for channel."
def get_names(self):
"""Returns the list of names for directly addressing the function"""
self.names = {"passive"}
for chan in ["chan ", "channel ", ""]:
for passive in [
"passive",
"passive func",
"passive funcs",
"passive function",
"passive functions",
]:
self.names.add(chan + passive)
return self.names
def run(self, event):
# Get server object
server_obj = event.server
# If no arguments given, toggle passive functions in current destination
line_clean = event.command_args.strip()
if line_clean == "":
event.channel.passive_enabled = not event.channel.passive_enabled
return event.create_response("Passive functions toggled.")
# If line has 1 argument,
line_split = line_clean.split()
if len(line_split) == 1:
# Check if a boolean was specified
input_bool = Commons.string_to_bool(line_split[0])
if input_bool is not None:
event.channel.passive_enabled = input_bool
return event.create_response(
"Passive functions set {}.".format(
{False: "disabled", True: "enabled"}[input_bool]
)
)
# Check if a channel was specified
target_channel = server_obj.get_channel_by_name(line_split[0])
if target_channel.in_channel:
target_channel.passive_enabled = not target_channel.passive_enabled
return event.create_response(
"Passive functions toggled in {}.".format(target_channel.name)
)
# Otherwise input unknown
return event.create_response(
"Error, I don't understand your input, please specify a channel and "
+ "whether to turn passive functions on or off."
)
# Otherwise line has 2 or more arguments.
# Check if first argument is boolean
input_bool = Commons.string_to_bool(line_split[0])
target_channel_name = line_split[1]
if input_bool is None:
input_bool = Commons.string_to_bool(line_split[1])
target_channel_name = line_split[0]
if input_bool is None:
return event.create_response(
"Error, I don't understand your input, please specify a channel and "
+ "whether to turn passive functions on or off."
)
target_channel = server_obj.get_channel_by_name(target_channel_name)
if not target_channel.in_channel:
return event.create_response("Error, I'm not in that channel.")
target_channel.passive_enabled = input_bool
return event.create_response(
"Passive functions set {} in {}.".format(
"enabled" if input_bool else "disabled", target_channel.name
)
)
class ChannelPassword(Function):
"""
Set password for a channel.
"""
def __init__(self):
"""
Constructor
"""
super().__init__()
# Name for use in help listing
self.help_name = "channel password"
# Names which can be used to address the function
self.names = {"channel password", "chan password", "channel pass", "chan pass"}
# Help documentation, if it's just a single line, can be set here
self.help_docs = "Sets or disables channel password."
def run(self, event):
# Get server object
server_obj = event.server
# If no arguments given, turn the password for current channel off.
line_clean = event.command_args.strip()
if line_clean == "":
event.channel.password = None
return event.create_response("Channel password disabled.")
# If line has 1 argument, set password for current channel
line_split = line_clean.split()
if len(line_split) == 1:
# Check if null was specified
input_null = Commons.is_string_null(line_split[0])
if input_null:
event.channel.password = None
return event.create_response("Channel password disabled.")
else:
event.channel.password = line_split[0]
return event.create_response("Channel password set.")
# Otherwise line has 2 or more arguments.
# Assume first is channel, and second is password.
input_null = Commons.is_string_null(line_split[1])
target_channel_name = line_split[0]
target_channel = server_obj.get_channel_by_name(target_channel_name)
if input_null:
target_channel.password = None
return event.create_response(
"Channel password disabled for {}.".format(target_channel.name)
)
else:
target_channel.password = line_split[1]
return event.create_response(
"Channel password set for {}.".format(target_channel.name)
)
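All four Function subclasses above accept their two arguments (a boolean flag and a channel name) in either order and rely on Commons.string_to_bool to decide which is which. A standalone restatement of that convention is sketched below purely for illustration; parse_bool_and_channel and the local string_to_bool stub are assumptions, not part of hallo's API.

def string_to_bool(word):
    # simplified stand-in for Commons.string_to_bool: True/False, or None if unrecognised
    word = word.lower()
    if word in {"on", "true", "yes", "1"}:
        return True
    if word in {"off", "false", "no", "0"}:
        return False
    return None


def parse_bool_and_channel(line_split):
    """Return (input_bool, channel_name) from two arguments given in either order."""
    input_bool = string_to_bool(line_split[0])
    channel_name = line_split[1]
    if input_bool is None:
        input_bool = string_to_bool(line_split[1])
        channel_name = line_split[0]
    return input_bool, channel_name


# e.g. parse_bool_and_channel(["#general", "on"]) -> (True, "#general")
#      parse_bool_and_channel(["off", "#general"]) -> (False, "#general")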
| 39.855305
| 87
| 0.590157
| 1,468
| 12,395
| 4.781335
| 0.09673
| 0.081493
| 0.062972
| 0.092606
| 0.802109
| 0.780168
| 0.778601
| 0.761789
| 0.739422
| 0.73415
| 0
| 0.004216
| 0.330214
| 12,395
| 310
| 88
| 39.983871
| 0.841243
| 0.166357
| 0
| 0.534562
| 0
| 0
| 0.161338
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.046083
| false
| 0.16129
| 0.009217
| 0
| 0.202765
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
32aa481b8ab08dffdcceccd009c3d16b01f629d2
| 118
|
py
|
Python
|
python-analysers/src/test/resources/org/jetbrains/research/lupa/pythonAnalysis/imports/analysis/psi/fromImportStatementsData/in_18_several_absolute_from_imports_with_alias.py
|
JetBrains-Research/Lupa
|
c105487621564c60cae17395bf32eb40868ceb89
|
[
"Apache-2.0"
] | 16
|
2022-01-11T00:32:20.000Z
|
2022-03-25T21:40:52.000Z
|
python-analysers/src/test/resources/org/jetbrains/research/lupa/pythonAnalysis/imports/analysis/psi/fromImportStatementsData/in_18_several_absolute_from_imports_with_alias.py
|
nbirillo/Kotlin-Analysis
|
73c3b8a59bf40ed932bb512f30b0ff31f251af40
|
[
"Apache-2.0"
] | 12
|
2021-07-05T11:42:01.000Z
|
2021-12-23T07:57:54.000Z
|
python-analysers/src/test/resources/org/jetbrains/research/lupa/pythonAnalysis/imports/analysis/psi/fromImportStatementsData/in_18_several_absolute_from_imports_with_alias.py
|
nbirillo/Kotlin-Analysis
|
73c3b8a59bf40ed932bb512f30b0ff31f251af40
|
[
"Apache-2.0"
] | 3
|
2021-09-10T13:21:54.000Z
|
2021-11-23T11:37:55.000Z
|
from src.tasks.task1 import utils as u
from src.tasks.task1 import common as c
from src.tasks.task1 import other as o
| 29.5
| 39
| 0.79661
| 24
| 118
| 3.916667
| 0.5
| 0.223404
| 0.382979
| 0.542553
| 0.734043
| 0
| 0
| 0
| 0
| 0
| 0
| 0.03
| 0.152542
| 118
| 3
| 40
| 39.333333
| 0.91
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
de532c1e9e44b0b0d244417266bd3869fa30ba42
| 179
|
py
|
Python
|
hTools2.roboFontExt/lib/Scripts/batch folder/actions.py
|
frankrolf/hTools2_extension
|
9d73b8640c85209853a72f8d4b167768de5e0d60
|
[
"BSD-3-Clause"
] | 2
|
2019-12-18T16:12:07.000Z
|
2019-12-21T01:19:23.000Z
|
hTools2.roboFontExt/lib/Scripts/batch folder/actions.py
|
frankrolf/hTools2_extension
|
9d73b8640c85209853a72f8d4b167768de5e0d60
|
[
"BSD-3-Clause"
] | null | null | null |
hTools2.roboFontExt/lib/Scripts/batch folder/actions.py
|
frankrolf/hTools2_extension
|
9d73b8640c85209853a72f8d4b167768de5e0d60
|
[
"BSD-3-Clause"
] | null | null | null |
# [h] apply actions
import hTools2.dialogs.folder.actions
import importlib
importlib.reload(hTools2.dialogs.folder.actions)
hTools2.dialogs.folder.actions.actionsFolderDialog()
| 22.375
| 52
| 0.832402
| 21
| 179
| 7.095238
| 0.47619
| 0.281879
| 0.402685
| 0.543624
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017964
| 0.067039
| 179
| 7
| 53
| 25.571429
| 0.874252
| 0.094972
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
de6c4fd4951e06589b7187b7283cf9f9a35df7f1
| 4,204
|
py
|
Python
|
trees_and_graphs/validate_bst/validate_bst_test.py
|
hanjasn/ctci
|
69c8c65d71e7f6e88b669dc402e64a0cf6223fbf
|
[
"MIT"
] | null | null | null |
trees_and_graphs/validate_bst/validate_bst_test.py
|
hanjasn/ctci
|
69c8c65d71e7f6e88b669dc402e64a0cf6223fbf
|
[
"MIT"
] | null | null | null |
trees_and_graphs/validate_bst/validate_bst_test.py
|
hanjasn/ctci
|
69c8c65d71e7f6e88b669dc402e64a0cf6223fbf
|
[
"MIT"
] | null | null | null |
import unittest
from validate_bst import *
class IsBST(unittest.TestCase):
def test_1(self) -> None:
root = BinaryTreeNode(5)
root.insert_in_order(2)
root.insert_in_order(1)
root.insert_in_order(3)
root.insert_in_order(4)
root.insert_in_order(7)
root.insert_in_order(6)
root.insert_in_order(8)
root.insert_in_order(9)
self.assertTrue(root.is_BST())
def test_2(self) -> None:
root = BinaryTreeNode(5)
root.insert_in_order(2)
root.insert_in_order(1)
root.insert_in_order(3)
root.insert_in_order(4)
root.insert_in_order(7)
root.insert_in_order(6)
root.insert_in_order(8)
root.insert_in_order(9)
root.left.data = 10
self.assertFalse(root.is_BST())
def test_3(self) -> None:
root = BinaryTreeNode(5)
root.insert_in_order(2)
root.insert_in_order(1)
root.insert_in_order(3)
root.insert_in_order(4)
root.insert_in_order(7)
root.insert_in_order(6)
root.insert_in_order(8)
root.insert_in_order(9)
node = root.find(8)
node.right.data = -1
self.assertFalse(root.is_BST())
def test_4(self) -> None:
root = BinaryTreeNode(20)
l = BinaryTreeNode(10)
r = BinaryTreeNode(30)
lr = BinaryTreeNode(25)
root.left = l
root.right = r
l.right = lr
self.assertFalse(root.is_BST())
def test_5(self) -> None:
root = BinaryTreeNode(1)
root.insert_in_order(2)
root.insert_in_order(3)
root.insert_in_order(4)
root.insert_in_order(5)
self.assertTrue(root.is_BST())
def test_6(self) -> None:
root = BinaryTreeNode(5)
root.insert_in_order(4)
root.insert_in_order(3)
root.insert_in_order(2)
root.insert_in_order(1)
self.assertTrue(root.is_BST())
def test_7(self) -> None:
root = BinaryTreeNode(1)
self.assertTrue(root.is_BST())
class IsBSTWithDup(unittest.TestCase):
def test_1(self) -> None:
root = BinaryTreeNode(5)
root.insert_in_order(2)
root.insert_in_order(1)
root.insert_in_order(3)
root.insert_in_order(4)
root.insert_in_order(7)
root.insert_in_order(6)
root.insert_in_order(8)
root.insert_in_order(9)
self.assertTrue(root.is_BST_with_dup())
def test_2(self) -> None:
root = BinaryTreeNode(5)
root.insert_in_order(2)
root.insert_in_order(1)
root.insert_in_order(3)
root.insert_in_order(4)
root.insert_in_order(7)
root.insert_in_order(6)
root.insert_in_order(8)
root.insert_in_order(9)
root.left.data = 10
self.assertFalse(root.is_BST_with_dup())
def test_3(self) -> None:
root = BinaryTreeNode(5)
root.insert_in_order(2)
root.insert_in_order(1)
root.insert_in_order(3)
root.insert_in_order(4)
root.insert_in_order(7)
root.insert_in_order(6)
root.insert_in_order(8)
root.insert_in_order(9)
node = root.find(8)
node.right.data = -1
self.assertFalse(root.is_BST_with_dup())
def test_4(self) -> None:
root = BinaryTreeNode(20)
l = BinaryTreeNode(10)
r = BinaryTreeNode(30)
lr = BinaryTreeNode(25)
root.left = l
root.right = r
l.right = lr
self.assertFalse(root.is_BST_with_dup())
def test_5(self) -> None:
root = BinaryTreeNode(20)
l = BinaryTreeNode(20)
r = BinaryTreeNode(30)
ll = BinaryTreeNode(15)
root.left = l
root.right = r
l.left = ll
self.assertTrue(root.is_BST_with_dup())
def test_6(self) -> None:
root = BinaryTreeNode(20)
l = BinaryTreeNode(10)
r = BinaryTreeNode(30)
lr = BinaryTreeNode(10)
root.left = l
root.right = r
l.right = lr
self.assertFalse(root.is_BST_with_dup())
def test_7(self) -> None:
root = BinaryTreeNode(1)
root.insert_in_order(2)
root.insert_in_order(3)
root.insert_in_order(4)
root.insert_in_order(5)
self.assertTrue(root.is_BST_with_dup())
def test_8(self) -> None:
root = BinaryTreeNode(5)
root.insert_in_order(4)
root.insert_in_order(3)
root.insert_in_order(2)
root.insert_in_order(1)
self.assertTrue(root.is_BST_with_dup())
def test_9(self) -> None:
root = BinaryTreeNode(1)
self.assertTrue(root.is_BST_with_dup())
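The validate_bst module these tests import is not included here. A minimal sketch of the min/max-bound check that the is_BST assertions above appear to exercise is shown below; the node attribute names (data, left, right) are taken from the tests, while everything else is an assumption rather than the module's actual implementation.

def is_bst(node, low=None, high=None):
    # classic range check: every value must lie strictly inside the (low, high) window
    if node is None:
        return True
    if low is not None and node.data <= low:
        return False
    if high is not None and node.data >= high:
        return False
    return is_bst(node.left, low, node.data) and is_bst(node.right, node.data, high)

The is_BST_with_dup variant exercised by IsBSTWithDup relaxes this check by allowing a child equal to its parent on the left side, as test_5 and test_6 of that class illustrate.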
| 25.634146
| 44
| 0.671741
| 656
| 4,204
| 4.032012
| 0.070122
| 0.241966
| 0.290359
| 0.411342
| 0.95879
| 0.95879
| 0.95879
| 0.928166
| 0.922117
| 0.889603
| 0
| 0.039628
| 0.207659
| 4,204
| 164
| 45
| 25.634146
| 0.754428
| 0
| 0
| 0.931507
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109589
| 1
| 0.109589
| false
| 0
| 0.013699
| 0
| 0.136986
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
defa21d3ea4fad6d07f7754becccf322b73642ed
| 10,972
|
py
|
Python
|
python/test_gilded_rose.py
|
upjohnc/gilded-rose-kata
|
fb44b66695cf95bc41da415fb1132e1d0dd0ca95
|
[
"MIT"
] | null | null | null |
python/test_gilded_rose.py
|
upjohnc/gilded-rose-kata
|
fb44b66695cf95bc41da415fb1132e1d0dd0ca95
|
[
"MIT"
] | null | null | null |
python/test_gilded_rose.py
|
upjohnc/gilded-rose-kata
|
fb44b66695cf95bc41da415fb1132e1d0dd0ca95
|
[
"MIT"
] | null | null | null |
import pytest
from gilded_rose import GildedRose, Item
def brie_quality_increase(quality: int) -> int:
return quality + 1
def sell_in_decrease(sell_in: int) -> int:
return sell_in - 1
def default_quality_decrease(quality: int) -> int:
return quality - 1
class TestDefault:
"""
Test basic functionality: decrease sell_in by 1 and decrease quality by one
GIVEN: GildedRose with one Item of type default
WHEN: call update_quality method
THEN: quality should be one less and sell_in should be one less
"""
quality = 10
sell_in = 20
name = 'default'
items = [Item(name, sell_in, quality)]
@pytest.fixture(scope='class')
def create_default(self):
gilded_rose = GildedRose(self.items)
gilded_rose.update_quality()
def test_quality_decrease(self, create_default):
assert self.items[0].quality == default_quality_decrease(self.quality)
def test_sell_in_decrease(self, create_default):
assert self.items[0].sell_in == sell_in_decrease(self.sell_in)
def test_default_quality_2_down():
"""
Test that quality goes down by 2 when sell_in date has gotten to 0
GIVEN: GildedRose with one Item of type default
WHEN: call update_quality method
THEN: quality should be two less
"""
quality = 10
sell_in = 0
name = 'default'
items = [Item(name, sell_in, quality)]
gilded_rose = GildedRose(items)
gilded_rose.update_quality()
assert items[0].quality == quality - 2
def test_quality_never_negative():
"""
Test that quality does not become negative
GIVEN: GildedRose with one Item of type default - quality at 0
WHEN: call update_quality method
THEN: quality should stay at 0
"""
# quality never negative
quality = 0
sell_in = 10
name = 'default'
items = [Item(name, sell_in, quality)]
gilded_rose = GildedRose(items)
gilded_rose.update_quality()
assert items[0].quality == 0
class TestAgedBrieNormal:
"""
Test basic functionality of `aged brie`: quality increases by 1
GIVEN: GildedRose with one Item of type `aged brie`
WHEN: call update_quality method
THEN: quality should be one more and sell_in should be one less
"""
# aged brie - quality goes up by one
quality = 10
sell_in = 20
name = 'aged brie'
items = [Item(name, sell_in, quality)]
@pytest.fixture(scope='class')
def create_aged_brie(self):
gilded_rose = GildedRose(self.items)
gilded_rose.update_quality()
def test_quality_increase(self, create_aged_brie):
assert self.items[0].quality == brie_quality_increase(self.quality)
def test_sell_in_decrease(self, create_aged_brie):
assert self.items[0].sell_in == sell_in_decrease(self.sell_in)
class TestAgedBrieQuality50:
"""
    Test that `aged brie` quality never exceeds 50
    GIVEN: GildedRose with one Item of type `aged brie` with quality at 50
    WHEN: call update_quality method
    THEN: quality should stay at 50 and sell_in should be one less
"""
# aged brie - never over 50
quality = 50
sell_in = 20
name = 'aged brie'
items = [Item(name, sell_in, quality)]
@pytest.fixture(scope='class')
def create_aged_brie(self):
gilded_rose = GildedRose(self.items)
gilded_rose.update_quality()
def test_quality_increase(self, create_aged_brie):
assert self.items[0].quality == self.quality
def test_sell_in_decrease(self, create_aged_brie):
assert self.items[0].sell_in == sell_in_decrease(self.sell_in)
class TestSulfuras:
"""
    Test sulfuras: quality and sell_in never change
    GIVEN: GildedRose with one Item of type `sulfuras`
    WHEN: call update_quality method
    THEN: name, quality and sell_in should all be unchanged
"""
# sulfuras - always 80 quality and age doesn't decrease
quality = 10
sell_in = 20
name = 'sulfuras'
items = [Item(name, sell_in, quality)]
@pytest.fixture(scope='class')
def create_sulfuras(self):
gilded_rose = GildedRose(self.items)
gilded_rose.update_quality()
def test_name(self, create_sulfuras):
assert self.items[0].name == self.name
def test_quality(self, create_sulfuras):
assert self.items[0].quality == self.quality
def test_sell_in(self, create_sulfuras):
assert self.items[0].sell_in == self.sell_in
class TestBackstageNormal:
"""
Test backstage normal
GIVEN: GildedRose with one Item of type `backstage passes`
WHEN: call update_quality method
THEN: quality should be one more and sell_in should be one less
"""
# backstage passes Normal
quality = 10
sell_in = 20
name = 'backstage passes'
items = [Item(name, sell_in, quality)]
@pytest.fixture(scope='class')
def create_backstage(self):
gilded_rose = GildedRose(self.items)
gilded_rose.update_quality()
def test_quality_increase(self, create_backstage):
assert self.items[0].quality == self.quality + 1
def test_sell_in_decrease(self, create_backstage):
assert self.items[0].sell_in == self.sell_in - 1
class TestBackstageQuality50:
"""
Test backstage quality not greater than 50
GIVEN: GildedRose with one Item of type `backstage passes` with quality at 50
WHEN: call update_quality method
THEN: quality should stay at 50 and sell_in should be one less
"""
# backstage passes Normal
quality = 50
sell_in = 20
name = 'backstage passes'
items = [Item(name, sell_in, quality)]
@pytest.fixture(scope='class')
def create_backstage(self):
gilded_rose = GildedRose(self.items)
gilded_rose.update_quality()
def test_quality_increase(self, create_backstage):
assert self.items[0].quality == 50
def test_sell_in_decrease(self, create_backstage):
assert self.items[0].sell_in == self.sell_in - 1
class TestBackstage10Day:
"""
Test backstage: quality increase when sell in less than 10
GIVEN: GildedRose with one Item of type backstage with sell_in at 10
WHEN: call update_quality method
THEN: quality should be two more and sell_in should be one less
"""
# backstage passes - 10 days - up 2 quality
quality = 20
sell_in = 10
name = 'backstage passes'
items = [Item(name, sell_in, quality)]
@pytest.fixture(scope='class')
def create_backstage(self):
gilded_rose = GildedRose(self.items)
gilded_rose.update_quality()
def test_quality_increase(self, create_backstage):
assert self.items[0].quality == self.quality + 2
def test_sell_in_decrease(self, create_backstage):
assert self.items[0].sell_in == self.sell_in - 1
class TestBackstage5Day:
"""
Test backstage: quality increase when sell in less than 5
GIVEN: GildedRose with one Item of type backstage with sell_in at 5
WHEN: call update_quality method
THEN: quality should be three more and sell_in should be one less
"""
# backstage passes - 5 days - up 3 quality
quality = 20
sell_in = 5
name = 'backstage passes'
items = [Item(name, sell_in, quality)]
@pytest.fixture(scope='class')
def create_backstage(self):
gilded_rose = GildedRose(self.items)
gilded_rose.update_quality()
def test_quality_increase(self, create_backstage):
assert self.items[0].quality == self.quality + 3
def test_sell_in_decrease(self, create_backstage):
assert self.items[0].sell_in == self.sell_in - 1
class TestBackstage0Day:
"""
Test backstage: quality goes to zero when sell_in has less than or equal to 0
GIVEN: GildedRose with one Item of type backstage with sell_in at 0
WHEN: call update_quality method
THEN: quality should be 0 and sell_in should be one less
"""
# backstage passes - 0 days - quality == 0
quality = 20
sell_in = 0
name = 'backstage passes'
items = [Item(name, sell_in, quality)]
@pytest.fixture(scope='class')
def create_backstage(self):
gilded_rose = GildedRose(self.items)
gilded_rose.update_quality()
def test_quality_increase(self, create_backstage):
assert self.items[0].quality == 0
def test_sell_in_decrease(self, create_backstage):
assert self.items[0].sell_in == self.sell_in - 1
class TestConjured:
"""
Test conjured: decreases by twice the normal rate
GIVEN: GildedRose with one Item of type `conjured`
WHEN: call update_quality method
THEN: quality should be two less and sell_in should be one less
"""
# conjured - quality down 2
quality = 10
sell_in = 20
name = 'conjured'
items = [Item(name, sell_in, quality)]
@pytest.fixture(scope='class')
def create_conjured(self):
gilded_rose = GildedRose(self.items)
gilded_rose.update_quality()
def test_quality_decrease(self, create_conjured):
assert self.items[0].quality == self.quality - 2
def test_sell_in_decrease(self, create_conjured):
assert self.items[0].sell_in == self.sell_in - 1
def test_conjured_quality_4_down():
"""
Test conjured: decreases by twice the normal rate - sell_in has reached 0
GIVEN: GildedRose with one Item of type `conjured` with sell_in at 0
WHEN: call update_quality method
THEN: quality should be four less and sell_in should be one less
"""
quality = 10
sell_in = 0
name = 'conjured'
items = [Item(name, sell_in, quality)]
gilded_rose = GildedRose(items)
gilded_rose.update_quality()
assert items[0].quality == quality - 4
class TestTwoItems:
"""
Test GildedRose handles more than one `Item` and that type is case-insensitive
    GIVEN: GildedRose with an `Aged Brie` Item and a `Default` Item
    WHEN: call update_quality method
    THEN: each item should be updated according to its own rules
"""
brie_quality = 10
brie_sell_in = 20
brie_name = 'Aged Brie'
default_quality = 30
default_sell_in = 40
default_name = 'Default'
items = [Item(brie_name, brie_sell_in, brie_quality),
Item(default_name, default_sell_in, default_quality),
]
@pytest.fixture(scope='class')
def create_rose(self):
gilded_rose = GildedRose(self.items)
gilded_rose.update_quality()
def test_quality_brie(self, create_rose):
assert self.items[0].quality == brie_quality_increase(self.brie_quality)
def test_sell_in_brie(self, create_rose):
assert self.items[0].sell_in == sell_in_decrease(self.brie_sell_in)
def test_quality_default(self, create_rose):
assert self.items[1].quality == default_quality_decrease(self.default_quality)
def test_sell_in_default(self, create_rose):
assert self.items[1].sell_in == sell_in_decrease(self.default_sell_in)
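Taken together, the GIVEN/WHEN/THEN docstrings above fully specify the per-item rules. The function below is a condensed restatement of those rules for a single item, written only to summarise the expected behaviour; it is not the GildedRose implementation under test, and the lower-cased name matching is an assumption based on the case-insensitivity note in TestTwoItems.

def update_item(item):
    # condensed restatement of the behaviour asserted by the tests above
    name = item.name.lower()
    if name == 'sulfuras':
        return  # legendary item: neither quality nor sell_in changes
    if name == 'aged brie':
        item.quality = min(50, item.quality + 1)
    elif name == 'backstage passes':
        if item.sell_in <= 0:
            item.quality = 0
        elif item.sell_in <= 5:
            item.quality = min(50, item.quality + 3)
        elif item.sell_in <= 10:
            item.quality = min(50, item.quality + 2)
        else:
            item.quality = min(50, item.quality + 1)
    else:
        step = 2 if name == 'conjured' else 1
        if item.sell_in <= 0:
            step *= 2  # quality degrades twice as fast past the sell-by date
        item.quality = max(0, item.quality - step)
    item.sell_in -= 1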
| 29.41555
| 86
| 0.686748
| 1,533
| 10,972
| 4.727984
| 0.073059
| 0.081126
| 0.051738
| 0.050773
| 0.827125
| 0.806981
| 0.787666
| 0.765866
| 0.708333
| 0.648731
| 0
| 0.017625
| 0.229493
| 10,972
| 372
| 87
| 29.494624
| 0.839721
| 0.301404
| 0
| 0.645161
| 0
| 0
| 0.029408
| 0
| 0
| 0
| 0
| 0
| 0.150538
| 1
| 0.225806
| false
| 0.026882
| 0.010753
| 0.016129
| 0.564516
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
7211ebf17c73b2346e531bd2a5d3df4855119a28
| 88
|
py
|
Python
|
AI(BE)/riskout/textrank/__init__.py
|
dev-taewon-kim/ai_web_RISKOUT_BTS
|
6b5650f8ecf068185ad87af62074627aca566018
|
[
"MIT"
] | 1
|
2021-11-04T07:54:58.000Z
|
2021-11-04T07:54:58.000Z
|
AI(BE)/riskout/textrank/__init__.py
|
mslee300/ai_web_RISKOUT_BTS
|
e01b3d27f68a0187b619920bf5c1f2de0cbc0f7d
|
[
"MIT"
] | null | null | null |
AI(BE)/riskout/textrank/__init__.py
|
mslee300/ai_web_RISKOUT_BTS
|
e01b3d27f68a0187b619920bf5c1f2de0cbc0f7d
|
[
"MIT"
] | null | null | null |
from .summarizer import KeywordSummarizer
from .summarizer import KeysentenceSummarizer
| 29.333333
| 45
| 0.886364
| 8
| 88
| 9.75
| 0.625
| 0.358974
| 0.512821
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 88
| 2
| 46
| 44
| 0.975
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
72270f632266b9decf2572be31bbea85dfd646fd
| 134
|
py
|
Python
|
mokka/__init__.py
|
syed-saif/Mokka
|
2ba0ed8e202bdf5c04092792ad1f359a692716dd
|
[
"MIT"
] | 2
|
2020-08-04T08:49:15.000Z
|
2020-08-04T16:18:47.000Z
|
mokka/__init__.py
|
syed-saif/Mokka
|
2ba0ed8e202bdf5c04092792ad1f359a692716dd
|
[
"MIT"
] | null | null | null |
mokka/__init__.py
|
syed-saif/Mokka
|
2ba0ed8e202bdf5c04092792ad1f359a692716dd
|
[
"MIT"
] | null | null | null |
from .app import Mokka
from .router import Router
from .templating import render_template
from .templating import render_from_string
| 22.333333
| 42
| 0.843284
| 19
| 134
| 5.789474
| 0.473684
| 0.254545
| 0.363636
| 0.472727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126866
| 134
| 6
| 42
| 22.333333
| 0.940171
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
723e551fecdd45552385da95e80916d8b74b56d1
| 325
|
py
|
Python
|
opti_models/models/backbones/__init__.py
|
IlyaDobrynin/opti_models
|
342b87e782650e8803196c52eee2b0d21f1d9576
|
[
"MIT"
] | 62
|
2020-09-07T10:30:09.000Z
|
2021-12-21T21:27:54.000Z
|
opti_models/models/backbones/__init__.py
|
IlyaDobrynin/opti_models
|
342b87e782650e8803196c52eee2b0d21f1d9576
|
[
"MIT"
] | null | null | null |
opti_models/models/backbones/__init__.py
|
IlyaDobrynin/opti_models
|
342b87e782650e8803196c52eee2b0d21f1d9576
|
[
"MIT"
] | 2
|
2020-11-06T14:31:30.000Z
|
2021-09-26T12:16:10.000Z
|
from .efficientnet import *
from .genet import genet_large, genet_normal, genet_small
from .mixnet import *
from .mobilenet import *
from .timm_models.timm_regnet import *
from .timm_models.timm_res2net import *
from .timm_models.timm_resnest import *
from .timm_models.timm_resnet import *
from .torchvision_models import *
| 32.5
| 57
| 0.812308
| 46
| 325
| 5.478261
| 0.347826
| 0.277778
| 0.222222
| 0.31746
| 0.380952
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003484
| 0.116923
| 325
| 9
| 58
| 36.111111
| 0.874564
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a0d86dbb82499e5f4d8c436b5d049253f6b2d11a
| 1,445
|
py
|
Python
|
build.py
|
soeren-m/pixelpart-plugin-godot3
|
e08b2220ff12ae17e4e037ac164e36aab43fd400
|
[
"MIT"
] | 1
|
2022-03-22T09:21:51.000Z
|
2022-03-22T09:21:51.000Z
|
build.py
|
soeren-m/pixelpart-plugin-godot3
|
e08b2220ff12ae17e4e037ac164e36aab43fd400
|
[
"MIT"
] | null | null | null |
build.py
|
soeren-m/pixelpart-plugin-godot3
|
e08b2220ff12ae17e4e037ac164e36aab43fd400
|
[
"MIT"
] | null | null | null |
import sys
import subprocess
if "platform=windows" in sys.argv:
subprocess.run("scons platform=windows bits=32 target=release generate_bindings=true", shell = True)
subprocess.run("scons platform=windows bits=64 target=release generate_bindings=true", shell = True)
elif "platform=linux" in sys.argv:
subprocess.run("scons platform=linux bits=32 target=release generate_bindings=true", shell = True)
subprocess.run("scons platform=linux bits=64 target=release generate_bindings=true", shell = True)
elif "platform=osx" in sys.argv:
subprocess.run("scons platform=osx bits=64 target=release generate_bindings=true", shell = True)
elif "platform=android" in sys.argv:
subprocess.run("scons platform=android android_arch=armv7 target=release generate_bindings=true", shell = True)
subprocess.run("scons platform=android android_arch=arm64v8 target=release generate_bindings=true", shell = True)
subprocess.run("scons platform=android android_arch=x86 target=release generate_bindings=true", shell = True)
subprocess.run("scons platform=android android_arch=x86_64 target=release generate_bindings=true", shell = True)
elif "platform=ios" in sys.argv:
subprocess.run("scons platform=ios ios_arch=arm64 target=release generate_bindings=true", shell = True)
elif "platform=javascript" in sys.argv:
subprocess.run("scons platform=javascript bits=32 target=release generate_bindings=true", shell = True)
| 57.8
| 117
| 0.781315
| 200
| 1,445
| 5.56
| 0.15
| 0.128597
| 0.178058
| 0.257194
| 0.901079
| 0.901079
| 0.879496
| 0.67446
| 0.67446
| 0.582734
| 0
| 0.01875
| 0.114187
| 1,445
| 25
| 118
| 57.8
| 0.85
| 0
| 0
| 0
| 1
| 0
| 0.608575
| 0.167358
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.105263
| 0
| 0.105263
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
19ba5cf995a1390225bf312c934058d0f1e6437c
| 4,974
|
py
|
Python
|
ks.py
|
lullabies777/xuanming
|
d87025bffc1138982854020882199935f8cc24f7
|
[
"MIT"
] | null | null | null |
ks.py
|
lullabies777/xuanming
|
d87025bffc1138982854020882199935f8cc24f7
|
[
"MIT"
] | null | null | null |
ks.py
|
lullabies777/xuanming
|
d87025bffc1138982854020882199935f8cc24f7
|
[
"MIT"
] | null | null | null |
from matplotlib import pyplot as plt
import numpy as np
import pandas as pd
def ks_value(preds, labels, n, asc):
# preds is score: asc=1
# preds is prob: asc=0
    pred = preds  # predicted values
    bad = labels  # 1 means bad, 0 means good
ksds = pd.DataFrame({'bad': bad, 'pred': pred})
ksds['good'] = 1 - ksds.bad
if asc == 1:
ksds1 = ksds.sort_values(by=['pred', 'bad'], ascending=[True, True])
elif asc == 0:
ksds1 = ksds.sort_values(by=['pred', 'bad'], ascending=[False, True])
ksds1.index = range(len(ksds1.pred))
ksds1['cumsum_good1'] = 1.0*ksds1.good.cumsum()/sum(ksds1.good)
ksds1['cumsum_bad1'] = 1.0*ksds1.bad.cumsum()/sum(ksds1.bad)
if asc == 1:
ksds2 = ksds.sort_values(by=['pred', 'bad'], ascending=[True, False])
elif asc == 0:
ksds2 = ksds.sort_values(by=['pred', 'bad'], ascending=[False, False])
ksds2.index = range(len(ksds2.pred))
ksds2['cumsum_good2'] = 1.0*ksds2.good.cumsum()/sum(ksds2.good)
ksds2['cumsum_bad2'] = 1.0*ksds2.bad.cumsum()/sum(ksds2.bad)
# ksds1 ksds2 -> average
ksds = ksds1[['cumsum_good1', 'cumsum_bad1']]
ksds['cumsum_good2'] = ksds2['cumsum_good2']
ksds['cumsum_bad2'] = ksds2['cumsum_bad2']
ksds['cumsum_good'] = (ksds['cumsum_good1'] + ksds['cumsum_good2'])/2
ksds['cumsum_bad'] = (ksds['cumsum_bad1'] + ksds['cumsum_bad2'])/2
# ks
ksds['ks'] = ksds['cumsum_bad'] - ksds['cumsum_good']
ksds['tile0'] = range(1, len(ksds.ks) + 1)
ksds['tile'] = 1.0*ksds['tile0']/len(ksds['tile0'])
qe = list(np.arange(0, 1, 1.0/n))
qe.append(1)
qe = qe[1:]
ks_index = pd.Series(ksds.index)
ks_index = ks_index.quantile(q = qe)
ks_index = np.ceil(ks_index).astype(int)
ks_index = list(ks_index)
ksds = ksds.loc[ks_index]
ksds = ksds[['tile', 'cumsum_good', 'cumsum_bad', 'ks']]
ksds0 = np.array([[0, 0, 0, 0]])
ksds = np.concatenate([ksds0, ksds], axis=0)
ksds = pd.DataFrame(ksds, columns=['tile', 'cumsum_good', 'cumsum_bad', 'ks'])
ks_value = ksds.ks.max()
return ks_value
def plotks(preds, labels, n, asc):
# preds is score: asc=1
# preds is prob: asc=0
    pred = preds  # predicted values
    bad = labels  # 1 means bad, 0 means good
ksds = pd.DataFrame({'bad': bad, 'pred': pred})
ksds['good'] = 1 - ksds.bad
if asc == 1:
ksds1 = ksds.sort_values(by=['pred', 'bad'], ascending=[True, True])
elif asc == 0:
ksds1 = ksds.sort_values(by=['pred', 'bad'], ascending=[False, True])
ksds1.index = range(len(ksds1.pred))
ksds1['cumsum_good1'] = 1.0*ksds1.good.cumsum()/sum(ksds1.good)
ksds1['cumsum_bad1'] = 1.0*ksds1.bad.cumsum()/sum(ksds1.bad)
if asc == 1:
ksds2 = ksds.sort_values(by=['pred', 'bad'], ascending=[True, False])
elif asc == 0:
ksds2 = ksds.sort_values(by=['pred', 'bad'], ascending=[False, False])
ksds2.index = range(len(ksds2.pred))
ksds2['cumsum_good2'] = 1.0*ksds2.good.cumsum()/sum(ksds2.good)
ksds2['cumsum_bad2'] = 1.0*ksds2.bad.cumsum()/sum(ksds2.bad)
# ksds1 ksds2 -> average
ksds = ksds1[['cumsum_good1', 'cumsum_bad1']]
ksds['cumsum_good2'] = ksds2['cumsum_good2']
ksds['cumsum_bad2'] = ksds2['cumsum_bad2']
ksds['cumsum_good'] = (ksds['cumsum_good1'] + ksds['cumsum_good2'])/2
ksds['cumsum_bad'] = (ksds['cumsum_bad1'] + ksds['cumsum_bad2'])/2
# ks
ksds['ks'] = ksds['cumsum_bad'] - ksds['cumsum_good']
ksds['tile0'] = range(1, len(ksds.ks) + 1)
ksds['tile'] = 1.0*ksds['tile0']/len(ksds['tile0'])
qe = list(np.arange(0, 1, 1.0/n))
qe.append(1)
qe = qe[1:]
ks_index = pd.Series(ksds.index)
ks_index = ks_index.quantile(q = qe)
ks_index = np.ceil(ks_index).astype(int)
ks_index = list(ks_index)
ksds = ksds.loc[ks_index]
ksds = ksds[['tile', 'cumsum_good', 'cumsum_bad', 'ks']]
ksds0 = np.array([[0, 0, 0, 0]])
ksds = np.concatenate([ksds0, ksds], axis=0)
ksds = pd.DataFrame(ksds, columns=['tile', 'cumsum_good', 'cumsum_bad', 'ks'])
ks_value = ksds.ks.max()
ks_pop = ksds.tile[ksds.ks.idxmax()]
print ('ks_value is ' + str(np.round(ks_value, 4)) + ' at pop = ' + str(np.round(ks_pop, 4)))
# chart
plt.plot(ksds.tile, ksds.cumsum_good, label='cum_good',
color='blue', linestyle='-', linewidth=2)
plt.plot(ksds.tile, ksds.cumsum_bad, label='cum_bad',
color='red', linestyle='-', linewidth=2)
plt.plot(ksds.tile, ksds.ks, label='ks',
color='green', linestyle='-', linewidth=2)
plt.axvline(ks_pop, color='gray', linestyle='--')
plt.axhline(ks_value, color='green', linestyle='--')
plt.axhline(ksds.loc[ksds.ks.idxmax(), 'cumsum_good'], color='blue', linestyle='--')
plt.axhline(ksds.loc[ksds.ks.idxmax(),'cumsum_bad'], color='red', linestyle='--')
plt.title('ks=%s ' %np.round(ks_value, 4) +
'at pop=%s' %np.round(ks_pop, 4), fontsize=15)
return ksds
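# Minimal usage sketch (not part of the original file): exercising plotks() above with
# synthetic labels and scores. The data, seed, and parameter choices are illustrative
# assumptions, not values taken from the source.
import numpy as np
import matplotlib.pyplot as plt
np.random.seed(0)
labels = np.random.binomial(1, 0.3, size=1000)              # 1 = bad, 0 = good
preds = np.clip(0.2 * labels + np.random.rand(1000), 0, 1)  # hypothetical probabilities of bad
plotks(preds, labels, n=10, asc=0)                          # asc=0 because preds are probabilities, not scores
plt.show()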
| 24.994975
| 97
| 0.600523
| 734
| 4,974
| 3.952316
| 0.13079
| 0.075836
| 0.038607
| 0.044123
| 0.877973
| 0.869011
| 0.858325
| 0.844536
| 0.818339
| 0.788004
| 0
| 0.041447
| 0.199638
| 4,974
| 199
| 98
| 24.994975
| 0.687265
| 0.036389
| 0
| 0.787879
| 0
| 0
| 0.156485
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.020202
| false
| 0
| 0.030303
| 0
| 0.070707
| 0.010101
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
19bcc48ee9fb238f298a5fac6d357b057a29c779
| 4,499
|
py
|
Python
|
test/unit/test_02_utils.py
|
au9ustine/cuda_aes
|
873e6768f34de1ea07fc71fc33475c9cd09843ea
|
[
"BSD-3-Clause"
] | 2
|
2015-06-13T01:44:31.000Z
|
2016-05-16T03:09:21.000Z
|
test/unit/test_02_utils.py
|
au9ustine/cuda_aes
|
873e6768f34de1ea07fc71fc33475c9cd09843ea
|
[
"BSD-3-Clause"
] | 7
|
2015-06-13T02:55:34.000Z
|
2015-07-16T16:29:21.000Z
|
test/unit/test_02_utils.py
|
au9ustine/cuda_aes
|
873e6768f34de1ea07fc71fc33475c9cd09843ea
|
[
"BSD-3-Clause"
] | null | null | null |
import array
import hashlib
import json
import os.path
import ctypes
from ctypes import *
import utils
logger = utils.get_logger('test_02_utils')
def load_shared_library():
my_lib_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
'..',
'..',
'src', 'cuda_aes_for_py.so')
assert os.path.exists(my_lib_path) is True
my_lib = CDLL(my_lib_path)
return my_lib
my_lib = load_shared_library()
def test_my_str2bytearray():
dst_len = c_uint32(0x10)
dst = create_string_buffer(0x10)
src = '000102030405060708090a0b0c0d0e0f'
src_len = c_uint32(len(src))
my_lib.str2bytearray(dst, dst_len, src, src_len)
assert dst.raw == '\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f'
def test_my_str2uintarray():
dst_len = c_uint32(0x10)
dst = create_string_buffer(0x10 * 4)
src = '000102030405060708090a0b0c0d0e0f'
src_len = c_uint32(len(src))
my_lib.str2uintarray(dst, dst_len, src, src_len)
assert dst.raw == '\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x05\x00\x00\x00\x06\x00\x00\x00\x07\x00\x00\x00\x08\x00\x00\x00\x09\x00\x00\x00\x0a\x00\x00\x00\x0b\x00\x00\x00\x0c\x00\x00\x00\x0d\x00\x00\x00\x0e\x00\x00\x00\x0f\x00\x00\x00'
def test_parse_test_data():
test_data = utils.read_rsp_file('CBC', 'GFSbox', 128)
expected = {'DECRYPT': [{'COUNT': 0, 'PLAINTEXT': 'f34481ec3cc627bacd5dc3fb08f273e6', 'CIPHERTEXT': '0336763e966d92595a567cc9ce537f5e', 'KEY': '00000000000000000000000000000000', 'IV': '00000000000000000000000000000000'}, {'COUNT': 1, 'PLAINTEXT': '9798c4640bad75c7c3227db910174e72', 'CIPHERTEXT': 'a9a1631bf4996954ebc093957b234589', 'KEY': '00000000000000000000000000000000', 'IV': '00000000000000000000000000000000'}, {'COUNT': 2, 'PLAINTEXT': '96ab5c2ff612d9dfaae8c31f30c42168', 'CIPHERTEXT': 'ff4f8391a6a40ca5b25d23bedd44a597', 'KEY': '00000000000000000000000000000000', 'IV': '00000000000000000000000000000000'}, {'COUNT': 3, 'PLAINTEXT': '6a118a874519e64e9963798a503f1d35', 'CIPHERTEXT': 'dc43be40be0e53712f7e2bf5ca707209', 'KEY': '00000000000000000000000000000000', 'IV': '00000000000000000000000000000000'}, {'COUNT': 4, 'PLAINTEXT': 'cb9fceec81286ca3e989bd979b0cb284', 'CIPHERTEXT': '92beedab1895a94faa69b632e5cc47ce', 'KEY': '00000000000000000000000000000000', 'IV': '00000000000000000000000000000000'}, {'COUNT': 5, 'PLAINTEXT': 'b26aeb1874e47ca8358ff22378f09144', 'CIPHERTEXT': '459264f4798f6a78bacb89c15ed3d601', 'KEY': '00000000000000000000000000000000', 'IV': '00000000000000000000000000000000'}, {'COUNT': 6, 'PLAINTEXT': '58c8e00b2631686d54eab84b91f0aca1', 'CIPHERTEXT': '08a4e2efec8a8e3312ca7460b9040bbf', 'KEY': '00000000000000000000000000000000', 'IV': '00000000000000000000000000000000'}], 'ENCRYPT': [{'COUNT': 0, 'PLAINTEXT': 'f34481ec3cc627bacd5dc3fb08f273e6', 'CIPHERTEXT': '0336763e966d92595a567cc9ce537f5e', 'KEY': '00000000000000000000000000000000', 'IV': '00000000000000000000000000000000'}, {'COUNT': 1, 'PLAINTEXT': '9798c4640bad75c7c3227db910174e72', 'CIPHERTEXT': 'a9a1631bf4996954ebc093957b234589', 'KEY': '00000000000000000000000000000000', 'IV': '00000000000000000000000000000000'}, {'COUNT': 2, 'PLAINTEXT': '96ab5c2ff612d9dfaae8c31f30c42168', 'CIPHERTEXT': 'ff4f8391a6a40ca5b25d23bedd44a597', 'KEY': '00000000000000000000000000000000', 'IV': '00000000000000000000000000000000'}, {'COUNT': 3, 'PLAINTEXT': '6a118a874519e64e9963798a503f1d35', 'CIPHERTEXT': 'dc43be40be0e53712f7e2bf5ca707209', 'KEY': '00000000000000000000000000000000', 'IV': '00000000000000000000000000000000'}, {'COUNT': 4, 'PLAINTEXT': 'cb9fceec81286ca3e989bd979b0cb284', 'CIPHERTEXT': '92beedab1895a94faa69b632e5cc47ce', 'KEY': '00000000000000000000000000000000', 'IV': '00000000000000000000000000000000'}, {'COUNT': 5, 'PLAINTEXT': 'b26aeb1874e47ca8358ff22378f09144', 'CIPHERTEXT': '459264f4798f6a78bacb89c15ed3d601', 'KEY': '00000000000000000000000000000000', 'IV': '00000000000000000000000000000000'}, {'COUNT': 6, 'PLAINTEXT': '58c8e00b2631686d54eab84b91f0aca1', 'CIPHERTEXT': '08a4e2efec8a8e3312ca7460b9040bbf', 'KEY': '00000000000000000000000000000000', 'IV': '00000000000000000000000000000000'}]}
expected_str_val = json.dumps(expected, sort_keys=True)
expected_hash_val = hashlib.sha1(expected_str_val).hexdigest()
actual_str_val = json.dumps(utils.parse_rsp_str(test_data), sort_keys=True)
actual_hash_val = hashlib.sha1(actual_str_val).hexdigest()
assert expected_hash_val == actual_hash_val
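# For reference only (not part of the original test file): pure-Python sketches of the
# behaviour the assertions above expect from the C helpers. The function names and details
# here are illustrative assumptions, not the CUDA library's actual implementation.
import struct
def py_str2bytearray(src):
    # '000102...0f' -> b'\x00\x01\x02...\x0f'
    return bytes(bytearray.fromhex(src))
def py_str2uintarray(src):
    # Each hex byte becomes a little-endian 32-bit word, matching the expected dst.raw above.
    return b''.join(struct.pack('<I', b) for b in bytearray.fromhex(src))
assert py_str2bytearray('000102030405060708090a0b0c0d0e0f')[:4] == b'\x00\x01\x02\x03'
assert py_str2uintarray('00010203')[:8] == b'\x00\x00\x00\x00\x01\x00\x00\x00'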
| 95.723404
| 2,813
| 0.766615
| 423
| 4,499
| 7.983452
| 0.257683
| 0.058632
| 0.045306
| 0.286053
| 0.73438
| 0.73438
| 0.73438
| 0.73438
| 0.73438
| 0.73438
| 0
| 0.414296
| 0.085797
| 4,499
| 46
| 2,814
| 97.804348
| 0.406759
| 0
| 0
| 0.2
| 0
| 0.05
| 0.587464
| 0.483663
| 0
| 0
| 0.003556
| 0
| 0.1
| 1
| 0.1
| false
| 0
| 0.175
| 0
| 0.3
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
19d5530295608b9e1528957831671c3452dddee4
| 351
|
py
|
Python
|
splunkapi3/model/__init__.py
|
swimlane/splunkapi3
|
9b12f58f17ea97a1fe8c6ff41e4da466b5e13e32
|
[
"MIT"
] | null | null | null |
splunkapi3/model/__init__.py
|
swimlane/splunkapi3
|
9b12f58f17ea97a1fe8c6ff41e4da466b5e13e32
|
[
"MIT"
] | null | null | null |
splunkapi3/model/__init__.py
|
swimlane/splunkapi3
|
9b12f58f17ea97a1fe8c6ff41e4da466b5e13e32
|
[
"MIT"
] | 3
|
2019-05-31T02:20:05.000Z
|
2021-02-22T00:45:53.000Z
|
from splunkapi3.model.search_create import SearchCreate
from splunkapi3.model.view_edit import ViewEdit
from splunkapi3.model.control_action import ControlAction
from splunkapi3.model.options import Options, Direction, SortMode
from splunkapi3.model.output_mode import OutputMode
from splunkapi3.model.search_result_options import SearchResultOptions
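# Illustrative only (an assumed usage pattern, not part of the original file): the
# re-exports above let callers import these model classes from the package itself
# rather than from the individual submodules.
from splunkapi3.model import SearchCreate, Options, Direction, SortMode, OutputMode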
| 50.142857
| 70
| 0.88604
| 44
| 351
| 6.931818
| 0.477273
| 0.27541
| 0.37377
| 0.163934
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018462
| 0.074074
| 351
| 6
| 71
| 58.5
| 0.92
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|